diff --git a/azure-mgmt-datalake-analytics/HISTORY.rst b/azure-mgmt-datalake-analytics/HISTORY.rst index 801d655e8ec0..2f68b4603f25 100644 --- a/azure-mgmt-datalake-analytics/HISTORY.rst +++ b/azure-mgmt-datalake-analytics/HISTORY.rst @@ -2,6 +2,60 @@ Release History =============== +0.2.0 (2017-07-25) +++++++++++++++++++ + +**Breaking changes** + +* Revised the inheritance structure for objects dealing with job creation, building, and retrieving. + + * NOTE: Only U-SQL is supported in this change; therefore, Hive is not supported. + * When submitting jobs, change JobInformation objects to CreateJobParameters. + + * When setting the properties for the CreateJobParameters object, be sure to change the USqlJobProperties object to a CreateUSqlJobProperties object. + + * When building jobs, change JobInformation objects to BuildJobParameters objects. + + * When setting the properties for the BuildJobParameters object, be sure to change the USqlJobProperties object to a CreateUSqlJobProperties object. 
+ * NOTE: The following fields are not a part of the BuildJobParameters object: + + * degreeOfParallelism + * priority + * related + + * When getting a list of jobs, the object type that is returned is JobInformationBasic and not JobInformation (more information on the difference is below in the Notes section) + +* When getting a list of accounts, the object type that is returned is DataLakeAnalyticsAccountBasic and not DataLakeAnalyticsAccount (more information on the difference is below in the Notes section) + +**Notes** + +* When getting a list of jobs, the job information for each job now includes a strict subset of the job information that is returned when getting a single job + + * The following fields are included in the job information when getting a single job but are not included in the job information when getting a list of jobs: + + * errorMessage + * stateAuditRecords + * properties + + * runtimeVersion + * script + * type + +* When getting a list of accounts, the account information for each account now includes a strict subset of the account information that is returned when getting a single account + + * There are two ways to get a list of accounts: List and ListByResource methods + * The following fields are included in the account information when getting a list of accounts, which is less than the account information retrieved for a single account: + + * provisioningState + * state + * creationTime + * lastModifiedTime + * endpoint + +* When retrieving account information, an account id field called "accountId" is now included. + + * accountId's description: The unique identifier associated with this Data Lake Analytics account. + 0.1.6 (2017-06-19) ++++++++++++++++++ * Fixing a regression discovered in 0.1.5. Please update to 0.1.6 to avoid any issues caused by that regression. 
diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/__init__.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/__init__.py index 58562494e087..873f6b045f42 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/__init__.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/__init__.py @@ -21,6 +21,8 @@ from .update_storage_account_parameters import UpdateStorageAccountParameters from .compute_policy_create_or_update_parameters import ComputePolicyCreateOrUpdateParameters from .data_lake_analytics_account_update_parameters import DataLakeAnalyticsAccountUpdateParameters +from .data_lake_analytics_account_properties_basic import DataLakeAnalyticsAccountPropertiesBasic +from .data_lake_analytics_account_basic import DataLakeAnalyticsAccountBasic from .data_lake_analytics_account import DataLakeAnalyticsAccount from .update_firewall_rule_parameters import UpdateFirewallRuleParameters from .resource import Resource @@ -32,14 +34,14 @@ from .sas_token_info_paged import SasTokenInfoPaged from .storage_account_info_paged import StorageAccountInfoPaged from .data_lake_store_account_info_paged import DataLakeStoreAccountInfoPaged -from .data_lake_analytics_account_paged import DataLakeAnalyticsAccountPaged +from .data_lake_analytics_account_basic_paged import DataLakeAnalyticsAccountBasicPaged from .data_lake_analytics_account_management_client_enums import ( - DataLakeAnalyticsAccountStatus, - DataLakeAnalyticsAccountState, TierType, FirewallState, FirewallAllowAzureIpsState, AADObjectType, + DataLakeAnalyticsAccountStatus, + DataLakeAnalyticsAccountState, ) __all__ = [ @@ -55,6 +57,8 @@ 'UpdateStorageAccountParameters', 'ComputePolicyCreateOrUpdateParameters', 'DataLakeAnalyticsAccountUpdateParameters', + 'DataLakeAnalyticsAccountPropertiesBasic', + 'DataLakeAnalyticsAccountBasic', 'DataLakeAnalyticsAccount', 'UpdateFirewallRuleParameters', 
'Resource', @@ -66,11 +70,11 @@ 'SasTokenInfoPaged', 'StorageAccountInfoPaged', 'DataLakeStoreAccountInfoPaged', - 'DataLakeAnalyticsAccountPaged', - 'DataLakeAnalyticsAccountStatus', - 'DataLakeAnalyticsAccountState', + 'DataLakeAnalyticsAccountBasicPaged', 'TierType', 'FirewallState', 'FirewallAllowAzureIpsState', 'AADObjectType', + 'DataLakeAnalyticsAccountStatus', + 'DataLakeAnalyticsAccountState', ] diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/compute_policy_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/compute_policy_paged.py index b78296478fff..3cd24ed75e18 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/compute_policy_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/compute_policy_paged.py @@ -14,7 +14,7 @@ class ComputePolicyPaged(Paged): """ - A paging container for iterating over a list of ComputePolicy object + A paging container for iterating over a list of :class:`ComputePolicy ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account.py index af895db94288..28708bcd73a6 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account.py @@ -39,6 +39,15 @@ class DataLakeAnalyticsAccount(Resource): include: 'Active', 'Suspended' :vartype state: str or :class:`DataLakeAnalyticsAccountState ` + :ivar creation_time: the account creation time. + :vartype creation_time: datetime + :ivar last_modified_time: the account last modified time. + :vartype last_modified_time: datetime + :ivar endpoint: the full CName endpoint for this account. 
+ :vartype endpoint: str + :ivar account_id: The unique identifier associated with this Data Lake + Analytics account. + :vartype account_id: str :param default_data_lake_store_account: the default data lake storage account associated with this Data Lake Analytics account. :type default_data_lake_store_account: str @@ -67,12 +76,6 @@ class DataLakeAnalyticsAccount(Resource): associated with this account. :type storage_accounts: list of :class:`StorageAccountInfo ` - :ivar creation_time: the account creation time. - :vartype creation_time: datetime - :ivar last_modified_time: the account last modified time. - :vartype last_modified_time: datetime - :ivar endpoint: the full CName endpoint for this account. - :vartype endpoint: str :param new_tier: the commitment tier for the next month. Possible values include: 'Consumption', 'Commitment_100AUHours', 'Commitment_500AUHours', 'Commitment_1000AUHours', 'Commitment_5000AUHours', @@ -123,6 +126,10 @@ class DataLakeAnalyticsAccount(Resource): 'location': {'required': True}, 'provisioning_state': {'readonly': True}, 'state': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'endpoint': {'readonly': True}, + 'account_id': {'readonly': True}, 'default_data_lake_store_account': {'required': True}, 'max_degree_of_parallelism': {'minimum': 1}, 'query_store_retention': {'maximum': 180, 'minimum': 1}, @@ -130,9 +137,6 @@ class DataLakeAnalyticsAccount(Resource): 'system_max_degree_of_parallelism': {'readonly': True}, 'system_max_job_count': {'readonly': True}, 'data_lake_store_accounts': {'required': True}, - 'creation_time': {'readonly': True}, - 'last_modified_time': {'readonly': True}, - 'endpoint': {'readonly': True}, 'current_tier': {'readonly': True}, 'max_degree_of_parallelism_per_job': {'minimum': 1}, 'min_priority_per_job': {'minimum': 1}, @@ -146,6 +150,10 @@ class DataLakeAnalyticsAccount(Resource): 'tags': {'key': 'tags', 'type': '{str}'}, 'provisioning_state': 
{'key': 'properties.provisioningState', 'type': 'DataLakeAnalyticsAccountStatus'}, 'state': {'key': 'properties.state', 'type': 'DataLakeAnalyticsAccountState'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'account_id': {'key': 'properties.accountId', 'type': 'str'}, 'default_data_lake_store_account': {'key': 'properties.defaultDataLakeStoreAccount', 'type': 'str'}, 'max_degree_of_parallelism': {'key': 'properties.maxDegreeOfParallelism', 'type': 'int'}, 'query_store_retention': {'key': 'properties.queryStoreRetention', 'type': 'int'}, @@ -154,9 +162,6 @@ class DataLakeAnalyticsAccount(Resource): 'system_max_job_count': {'key': 'properties.systemMaxJobCount', 'type': 'int'}, 'data_lake_store_accounts': {'key': 'properties.dataLakeStoreAccounts', 'type': '[DataLakeStoreAccountInfo]'}, 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[StorageAccountInfo]'}, - 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, - 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, 'new_tier': {'key': 'properties.newTier', 'type': 'TierType'}, 'current_tier': {'key': 'properties.currentTier', 'type': 'TierType'}, 'firewall_state': {'key': 'properties.firewallState', 'type': 'FirewallState'}, @@ -171,6 +176,10 @@ def __init__(self, location, default_data_lake_store_account, data_lake_store_ac super(DataLakeAnalyticsAccount, self).__init__(location=location, tags=tags) self.provisioning_state = None self.state = None + self.creation_time = None + self.last_modified_time = None + self.endpoint = None + self.account_id = None self.default_data_lake_store_account = default_data_lake_store_account self.max_degree_of_parallelism = max_degree_of_parallelism self.query_store_retention = 
query_store_retention @@ -179,9 +188,6 @@ def __init__(self, location, default_data_lake_store_account, data_lake_store_ac self.system_max_job_count = None self.data_lake_store_accounts = data_lake_store_accounts self.storage_accounts = storage_accounts - self.creation_time = None - self.last_modified_time = None - self.endpoint = None self.new_tier = new_tier self.current_tier = None self.firewall_state = firewall_state diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_basic.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_basic.py new file mode 100644 index 000000000000..61b694f43d55 --- /dev/null +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_basic.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource import Resource + + +class DataLakeAnalyticsAccountBasic(Resource): + """A Data Lake Analytics account object, containing all information associated + with the named Data Lake Analytics account. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Resource Id + :vartype id: str + :ivar name: Resource name + :vartype name: str + :ivar type: Resource type + :vartype type: str + :param location: Resource location + :type location: str + :param tags: Resource tags + :type tags: dict + :ivar provisioning_state: the provisioning status of the Data Lake + Analytics account. 
Possible values include: 'Failed', 'Creating', + 'Running', 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', + 'Deleted' + :vartype provisioning_state: str or :class:`DataLakeAnalyticsAccountStatus + ` + :ivar state: the state of the Data Lake Analytics account. Possible values + include: 'Active', 'Suspended' + :vartype state: str or :class:`DataLakeAnalyticsAccountState + ` + :ivar creation_time: the account creation time. + :vartype creation_time: datetime + :ivar last_modified_time: the account last modified time. + :vartype last_modified_time: datetime + :ivar endpoint: the full CName endpoint for this account. + :vartype endpoint: str + :ivar account_id: The unique identifier associated with this Data Lake + Analytics account. + :vartype account_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'state': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'endpoint': {'readonly': True}, + 'account_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'DataLakeAnalyticsAccountStatus'}, + 'state': {'key': 'properties.state', 'type': 'DataLakeAnalyticsAccountState'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'account_id': {'key': 'properties.accountId', 'type': 'str'}, + } + + def __init__(self, location, tags=None): + super(DataLakeAnalyticsAccountBasic, self).__init__(location=location, 
tags=tags) + self.provisioning_state = None + self.state = None + self.creation_time = None + self.last_modified_time = None + self.endpoint = None + self.account_id = None diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_basic_paged.py similarity index 68% rename from azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_paged.py rename to azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_basic_paged.py index 8414cc6a7e5b..279867b941e2 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_basic_paged.py @@ -12,16 +12,16 @@ from msrest.paging import Paged -class DataLakeAnalyticsAccountPaged(Paged): +class DataLakeAnalyticsAccountBasicPaged(Paged): """ - A paging container for iterating over a list of DataLakeAnalyticsAccount object + A paging container for iterating over a list of :class:`DataLakeAnalyticsAccountBasic ` object """ _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[DataLakeAnalyticsAccount]'} + 'current_page': {'key': 'value', 'type': '[DataLakeAnalyticsAccountBasic]'} } def __init__(self, *args, **kwargs): - super(DataLakeAnalyticsAccountPaged, self).__init__(*args, **kwargs) + super(DataLakeAnalyticsAccountBasicPaged, self).__init__(*args, **kwargs) diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_management_client_enums.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_management_client_enums.py index 
33ef3e81f897..ed4a500305c3 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_management_client_enums.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_management_client_enums.py @@ -12,25 +12,6 @@ from enum import Enum -class DataLakeAnalyticsAccountStatus(Enum): - - failed = "Failed" - creating = "Creating" - running = "Running" - succeeded = "Succeeded" - patching = "Patching" - suspending = "Suspending" - resuming = "Resuming" - deleting = "Deleting" - deleted = "Deleted" - - -class DataLakeAnalyticsAccountState(Enum): - - active = "Active" - suspended = "Suspended" - - class TierType(Enum): consumption = "Consumption" @@ -61,3 +42,22 @@ class AADObjectType(Enum): user = "User" group = "Group" service_principal = "ServicePrincipal" + + +class DataLakeAnalyticsAccountStatus(Enum): + + failed = "Failed" + creating = "Creating" + running = "Running" + succeeded = "Succeeded" + patching = "Patching" + suspending = "Suspending" + resuming = "Resuming" + deleting = "Deleting" + deleted = "Deleted" + + +class DataLakeAnalyticsAccountState(Enum): + + active = "Active" + suspended = "Suspended" diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_properties_basic.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_properties_basic.py new file mode 100644 index 000000000000..9cfae99acc4a --- /dev/null +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_analytics_account_properties_basic.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class DataLakeAnalyticsAccountPropertiesBasic(Model): + """The basic account specific properties that are associated with an + underlying Data Lake Analytics account. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar provisioning_state: the provisioning status of the Data Lake + Analytics account. Possible values include: 'Failed', 'Creating', + 'Running', 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', + 'Deleted' + :vartype provisioning_state: str or :class:`DataLakeAnalyticsAccountStatus + ` + :ivar state: the state of the Data Lake Analytics account. Possible values + include: 'Active', 'Suspended' + :vartype state: str or :class:`DataLakeAnalyticsAccountState + ` + :ivar creation_time: the account creation time. + :vartype creation_time: datetime + :ivar last_modified_time: the account last modified time. + :vartype last_modified_time: datetime + :ivar endpoint: the full CName endpoint for this account. + :vartype endpoint: str + :ivar account_id: The unique identifier associated with this Data Lake + Analytics account. 
+ :vartype account_id: str + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'state': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'endpoint': {'readonly': True}, + 'account_id': {'readonly': True}, + } + + _attribute_map = { + 'provisioning_state': {'key': 'provisioningState', 'type': 'DataLakeAnalyticsAccountStatus'}, + 'state': {'key': 'state', 'type': 'DataLakeAnalyticsAccountState'}, + 'creation_time': {'key': 'creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'lastModifiedTime', 'type': 'iso-8601'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'account_id': {'key': 'accountId', 'type': 'str'}, + } + + def __init__(self): + self.provisioning_state = None + self.state = None + self.creation_time = None + self.last_modified_time = None + self.endpoint = None + self.account_id = None diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_store_account_info_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_store_account_info_paged.py index 47a560ec3c20..e3f82dc99f38 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_store_account_info_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/data_lake_store_account_info_paged.py @@ -14,7 +14,7 @@ class DataLakeStoreAccountInfoPaged(Paged): """ - A paging container for iterating over a list of DataLakeStoreAccountInfo object + A paging container for iterating over a list of :class:`DataLakeStoreAccountInfo ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/firewall_rule_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/firewall_rule_paged.py index 90cd653ec5f8..20614418048d 100644 --- 
a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/firewall_rule_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/firewall_rule_paged.py @@ -14,7 +14,7 @@ class FirewallRulePaged(Paged): """ - A paging container for iterating over a list of FirewallRule object + A paging container for iterating over a list of :class:`FirewallRule ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/sas_token_info_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/sas_token_info_paged.py index d5c653acef0f..42eb70a31505 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/sas_token_info_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/sas_token_info_paged.py @@ -14,7 +14,7 @@ class SasTokenInfoPaged(Paged): """ - A paging container for iterating over a list of SasTokenInfo object + A paging container for iterating over a list of :class:`SasTokenInfo ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_account_info_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_account_info_paged.py index c7bae48ae50b..ed2c07040833 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_account_info_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_account_info_paged.py @@ -14,7 +14,7 @@ class StorageAccountInfoPaged(Paged): """ - A paging container for iterating over a list of StorageAccountInfo object + A paging container for iterating over a list of :class:`StorageAccountInfo ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_container.py 
b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_container.py index 5a4c865a99df..7280a7e2f181 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_container.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_container.py @@ -18,10 +18,10 @@ class StorageContainer(Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar name: the name of the blob container. - :vartype name: str :ivar id: the unique identifier of the blob container. :vartype id: str + :ivar name: the name of the blob container. + :vartype name: str :ivar type: the type of the blob container. :vartype type: str :ivar last_modified_time: the last modified time of the blob container. @@ -29,21 +29,21 @@ class StorageContainer(Model): """ _validation = { - 'name': {'readonly': True}, 'id': {'readonly': True}, + 'name': {'readonly': True}, 'type': {'readonly': True}, 'last_modified_time': {'readonly': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, } def __init__(self): - self.name = None self.id = None + self.name = None self.type = None self.last_modified_time = None diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_container_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_container_paged.py index b036e9e69d3c..c18b0555a334 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_container_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/models/storage_container_paged.py @@ -14,7 +14,7 @@ class StorageContainerPaged(Paged): """ - A paging container for 
iterating over a list of StorageContainer object + A paging container for iterating over a list of :class:`StorageContainer ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/account_operations.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/account_operations.py index 826621c90f29..54ceca3684b0 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/account_operations.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/account_operations.py @@ -9,10 +9,10 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError from msrestazure.azure_operation import AzureOperationPoller -import uuid from .. import models @@ -69,8 +69,11 @@ def list_by_resource_group( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: :class:`DataLakeAnalyticsAccountPaged - ` + :return: An iterator like instance of + :class:`DataLakeAnalyticsAccountBasic + ` + :rtype: :class:`DataLakeAnalyticsAccountBasicPaged + ` :raises: :class:`CloudError` """ def internal_paging(next_link=None, raw=False): @@ -127,11 +130,11 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.DataLakeAnalyticsAccountPaged(internal_paging, self._deserialize.dependencies) + deserialized = models.DataLakeAnalyticsAccountBasicPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} - client_raw_response = models.DataLakeAnalyticsAccountPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.DataLakeAnalyticsAccountBasicPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized @@ -166,8 +169,11 @@ def list( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: :class:`DataLakeAnalyticsAccountPaged - ` + :return: An iterator like instance of + :class:`DataLakeAnalyticsAccountBasic + ` + :rtype: :class:`DataLakeAnalyticsAccountBasicPaged + ` :raises: :class:`CloudError` """ def internal_paging(next_link=None, raw=False): @@ -223,34 +229,43 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.DataLakeAnalyticsAccountPaged(internal_paging, self._deserialize.dependencies) + deserialized = models.DataLakeAnalyticsAccountBasicPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} - client_raw_response = models.DataLakeAnalyticsAccountPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.DataLakeAnalyticsAccountBasicPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized - def get( - self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config): - """Gets details of the specified Data Lake Analytics account. + def create( + self, resource_group_name, account_name, parameters, custom_headers=None, raw=False, **operation_config): + """Creates the specified Data Lake Analytics account. This supplies the + user with computation services for Data Lake Analytics workloads. :param resource_group_name: The name of the Azure resource group that - contains the Data Lake Analytics account. + contains the Data Lake Analytics account. The account will be + associated with this resource group. :type resource_group_name: str :param account_name: The name of the Data Lake Analytics account to - retrieve. + create. :type account_name: str + :param parameters: Parameters supplied to the create Data Lake + Analytics account operation. 
+ :type parameters: :class:`DataLakeAnalyticsAccount + ` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :rtype: :class:`DataLakeAnalyticsAccount + :return: + :class:`AzureOperationPoller` + instance that returns :class:`DataLakeAnalyticsAccount ` - :rtype: :class:`ClientRawResponse` - if raw=true + or :class:`ClientRawResponse` if + raw=true + :rtype: + :class:`AzureOperationPoller` + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -276,45 +291,82 @@ def get( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + # Construct body + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsAccount') + # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send(request, header_parameters, **operation_config) + def long_running_send(): - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp + request = self._client.put(url, query_parameters) + return self._client.send( + request, header_parameters, body_content, **operation_config) - deserialized = None + def get_long_running_status(status_link, headers=None): - if response.status_code == 200: - deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + request = self._client.get(status_link) + if headers: + request.headers.update(headers) + return self._client.send( + request, header_parameters, **operation_config) + + def get_long_running_output(response): + + if response.status_code not in [200, 201]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + + if 
response.status_code == 200: + deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + if response.status_code == 201: + deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response + response = long_running_send() + return get_long_running_output(response) - return deserialized + long_running_operation_timeout = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + return AzureOperationPoller( + long_running_send, get_long_running_output, + get_long_running_status, long_running_operation_timeout) - def delete( - self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config): - """Begins the delete delete process for the Data Lake Analytics account - object specified by the account name. + def update( + self, resource_group_name, account_name, parameters=None, custom_headers=None, raw=False, **operation_config): + """Updates the Data Lake Analytics account object specified by the + accountName with the contents of the account object. :param resource_group_name: The name of the Azure resource group that contains the Data Lake Analytics account. :type resource_group_name: str :param account_name: The name of the Data Lake Analytics account to - delete + update. :type account_name: str + :param parameters: Parameters supplied to the update Data Lake + Analytics account operation. 
+ :type parameters: :class:`DataLakeAnalyticsAccountUpdateParameters + ` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response + :return: + :class:`AzureOperationPoller` + instance that returns :class:`DataLakeAnalyticsAccount + ` + or :class:`ClientRawResponse` if + raw=true :rtype: :class:`AzureOperationPoller` - instance that returns None - :rtype: :class:`ClientRawResponse` - if raw=true + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -340,11 +392,18 @@ def delete( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsAccountUpdateParameters') + else: + body_content = None + # Construct and send request def long_running_send(): - request = self._client.delete(url, query_parameters) - return self._client.send(request, header_parameters, **operation_config) + request = self._client.patch(url, query_parameters) + return self._client.send( + request, header_parameters, body_content, **operation_config) def get_long_running_status(status_link, headers=None): @@ -356,15 +415,24 @@ def get_long_running_status(status_link, headers=None): def get_long_running_output(response): - if response.status_code not in [200, 202, 204]: + if response.status_code not in [200, 201]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + if response.status_code == 201: + deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + if raw: - client_raw_response = ClientRawResponse(None, response) + client_raw_response = 
ClientRawResponse(deserialized, response) return client_raw_response + return deserialized + if raw: response = long_running_send() return get_long_running_output(response) @@ -376,31 +444,28 @@ def get_long_running_output(response): long_running_send, get_long_running_output, get_long_running_status, long_running_operation_timeout) - def create( - self, resource_group_name, account_name, parameters, custom_headers=None, raw=False, **operation_config): - """Creates the specified Data Lake Analytics account. This supplies the - user with computation services for Data Lake Analytics workloads. + def delete( + self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config): + """Begins the delete process for the Data Lake Analytics account object + specified by the account name. :param resource_group_name: The name of the Azure resource group that - contains the Data Lake Analytics account.the account will be - associated with. + contains the Data Lake Analytics account. :type resource_group_name: str :param account_name: The name of the Data Lake Analytics account to - create. + delete :type account_name: str - :param parameters: Parameters supplied to the create Data Lake - Analytics account operation. 
- :type parameters: :class:`DataLakeAnalyticsAccount - ` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response + :return: + :class:`AzureOperationPoller` + instance that returns None or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`AzureOperationPoller` - instance that returns :class:`DataLakeAnalyticsAccount - ` - :rtype: :class:`ClientRawResponse` - if raw=true + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -426,15 +491,11 @@ def create( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - # Construct body - body_content = self._serialize.body(parameters, 'DataLakeAnalyticsAccount') - # Construct and send request def long_running_send(): - request = self._client.put(url, query_parameters) - return self._client.send( - request, header_parameters, body_content, **operation_config) + request = self._client.delete(url, query_parameters) + return self._client.send(request, header_parameters, **operation_config) def get_long_running_status(status_link, headers=None): @@ -446,24 +507,15 @@ def get_long_running_status(status_link, headers=None): def get_long_running_output(response): - if response.status_code not in [201, 200]: + if response.status_code not in [200, 202, 204]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp - deserialized = None - - if response.status_code == 201: - deserialized = self._deserialize('DataLakeAnalyticsAccount', response) - if response.status_code == 200: - deserialized = self._deserialize('DataLakeAnalyticsAccount', response) - if raw: - client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response = ClientRawResponse(None, response) return client_raw_response - return deserialized - if raw: response = 
long_running_send() return get_long_running_output(response) @@ -475,30 +527,28 @@ def get_long_running_output(response): long_running_send, get_long_running_output, get_long_running_status, long_running_operation_timeout) - def update( - self, resource_group_name, account_name, parameters=None, custom_headers=None, raw=False, **operation_config): - """Updates the Data Lake Analytics account object specified by the - accountName with the contents of the account object. + def get( + self, resource_group_name, account_name, custom_headers=None, raw=False, **operation_config): + """Gets details of the specified Data Lake Analytics account. :param resource_group_name: The name of the Azure resource group that contains the Data Lake Analytics account. :type resource_group_name: str :param account_name: The name of the Data Lake Analytics account to - update. + retrieve. :type account_name: str - :param parameters: Parameters supplied to the update Data Lake - Analytics account operation. - :type parameters: :class:`DataLakeAnalyticsAccountUpdateParameters - ` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response - :rtype: - :class:`AzureOperationPoller` - instance that returns :class:`DataLakeAnalyticsAccount + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: :class:`DataLakeAnalyticsAccount ` - :rtype: :class:`ClientRawResponse` - if raw=true + or :class:`ClientRawResponse` if + raw=true + :rtype: :class:`DataLakeAnalyticsAccount + ` + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -524,54 +574,22 @@ def update( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - # Construct body - if parameters is not None: - body_content = self._serialize.body(parameters, 'DataLakeAnalyticsAccountUpdateParameters') - else: - body_content = None - # Construct and send request - def long_running_send(): - - request = self._client.patch(url, query_parameters) - return self._client.send( - request, header_parameters, body_content, **operation_config) - - def get_long_running_status(status_link, headers=None): - - request = self._client.get(status_link) - if headers: - request.headers.update(headers) - return self._client.send( - request, header_parameters, **operation_config) - - def get_long_running_output(response): - - if response.status_code not in [200, 201]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - deserialized = None + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) - if response.status_code == 200: - deserialized = self._deserialize('DataLakeAnalyticsAccount', response) - if response.status_code == 201: - deserialized = self._deserialize('DataLakeAnalyticsAccount', response) + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response + deserialized = None - return deserialized + if response.status_code == 200: + deserialized = 
self._deserialize('DataLakeAnalyticsAccount', response) if raw: - response = long_running_send() - return get_long_running_output(response) + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response - long_running_operation_timeout = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - return AzureOperationPoller( - long_running_send, get_long_running_output, - get_long_running_status, long_running_operation_timeout) + return deserialized diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/compute_policies_operations.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/compute_policies_operations.py index 2d229e429bdf..1dae436a6aff 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/compute_policies_operations.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/compute_policies_operations.py @@ -9,9 +9,9 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -60,10 +60,13 @@ def create_or_update( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`ComputePolicy + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`ComputePolicy - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -137,10 +140,13 @@ def update( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: :class:`ComputePolicy + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`ComputePolicy - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ parameters = None @@ -216,9 +222,11 @@ def delete( deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -276,10 +284,13 @@ def get( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`ComputePolicy + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`ComputePolicy - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -342,6 +353,8 @@ def list_by_account( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`ComputePolicy + ` :rtype: :class:`ComputePolicyPaged ` :raises: :class:`CloudError` diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/data_lake_store_accounts_operations.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/data_lake_store_accounts_operations.py index de8aab91ce5e..e7146df9c79e 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/data_lake_store_accounts_operations.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/data_lake_store_accounts_operations.py @@ -9,9 +9,9 @@ # regenerated. 
# -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -35,31 +35,38 @@ def __init__(self, client, config, serializer, deserializer): self.config = config - def get( - self, resource_group_name, account_name, data_lake_store_account_name, custom_headers=None, raw=False, **operation_config): - """Gets the specified Data Lake Store account details in the specified - Data Lake Analytics account. + def add( + self, resource_group_name, account_name, data_lake_store_account_name, suffix=None, custom_headers=None, raw=False, **operation_config): + """Updates the specified Data Lake Analytics account to include the + additional Data Lake Store account. :param resource_group_name: The name of the Azure resource group that contains the Data Lake Analytics account. :type resource_group_name: str - :param account_name: The name of the Data Lake Analytics account from - which to retrieve the Data Lake Store account details. + :param account_name: The name of the Data Lake Analytics account to + which to add the Data Lake Store account. :type account_name: str :param data_lake_store_account_name: The name of the Data Lake Store - account to retrieve + account to add. :type data_lake_store_account_name: str + :param suffix: the optional suffix for the Data Lake Store account. + :type suffix: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: :class:`DataLakeStoreAccountInfo - ` - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ + parameters = None + if suffix is not None: + parameters = models.AddDataLakeStoreParameters(suffix=suffix) + # Construct URL url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/DataLakeStoreAccounts/{dataLakeStoreAccountName}' path_format_arguments = { @@ -84,26 +91,26 @@ def get( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'AddDataLakeStoreParameters') + else: + body_content = None + # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send(request, header_parameters, **operation_config) + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('DataLakeStoreAccountInfo', response) - if raw: - client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response = ClientRawResponse(None, response) return client_raw_response - return deserialized - def delete( self, resource_group_name, account_name, data_lake_store_account_name, custom_headers=None, raw=False, **operation_config): """Updates the Data Lake Analytics account specified to remove the @@ -123,9 +130,11 @@ def delete( deserialized response :param operation_config: :ref:`Operation 
configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -165,36 +174,34 @@ def delete( client_raw_response = ClientRawResponse(None, response) return client_raw_response - def add( - self, resource_group_name, account_name, data_lake_store_account_name, suffix=None, custom_headers=None, raw=False, **operation_config): - """Updates the specified Data Lake Analytics account to include the - additional Data Lake Store account. + def get( + self, resource_group_name, account_name, data_lake_store_account_name, custom_headers=None, raw=False, **operation_config): + """Gets the specified Data Lake Store account details in the specified + Data Lake Analytics account. :param resource_group_name: The name of the Azure resource group that contains the Data Lake Analytics account. :type resource_group_name: str - :param account_name: The name of the Data Lake Analytics account to - which to add the Data Lake Store account. + :param account_name: The name of the Data Lake Analytics account from + which to retrieve the Data Lake Store account details. :type account_name: str :param data_lake_store_account_name: The name of the Data Lake Store - account to add. + account to retrieve :type data_lake_store_account_name: str - :param suffix: the optional suffix for the Data Lake Store account. - :type suffix: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: :class:`DataLakeStoreAccountInfo + ` + or :class:`ClientRawResponse` if + raw=true + :rtype: :class:`DataLakeStoreAccountInfo + ` + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ - parameters = None - if suffix is not None: - parameters = models.AddDataLakeStoreParameters(suffix=suffix) - # Construct URL url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/DataLakeStoreAccounts/{dataLakeStoreAccountName}' path_format_arguments = { @@ -219,26 +226,26 @@ def add( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - # Construct body - if parameters is not None: - body_content = self._serialize.body(parameters, 'AddDataLakeStoreParameters') - else: - body_content = None - # Construct and send request - request = self._client.put(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('DataLakeStoreAccountInfo', response) + if raw: - client_raw_response = ClientRawResponse(None, response) + client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response + return deserialized + def list_by_account( self, resource_group_name, account_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): """Gets the first page of Data Lake Store accounts linked to the specified @@ 
-276,6 +283,8 @@ def list_by_account( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`DataLakeStoreAccountInfo + ` :rtype: :class:`DataLakeStoreAccountInfoPaged ` :raises: :class:`CloudError` diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/firewall_rules_operations.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/firewall_rules_operations.py index 5f59d12de725..abe419d78f83 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/firewall_rules_operations.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/firewall_rules_operations.py @@ -9,9 +9,9 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -59,10 +59,13 @@ def create_or_update( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`FirewallRule + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`FirewallRule - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -138,10 +141,13 @@ def update( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`FirewallRule + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`FirewallRule - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ parameters = None @@ -217,9 +223,11 @@ def delete( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -276,10 +284,13 @@ def get( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`FirewallRule + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`FirewallRule - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -342,6 +353,8 @@ def list_by_account( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`FirewallRule + ` :rtype: :class:`FirewallRulePaged ` :raises: :class:`CloudError` diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/storage_accounts_operations.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/storage_accounts_operations.py index 8df2c5601f2f..76481dbb5295 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/storage_accounts_operations.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/account/operations/storage_accounts_operations.py @@ -9,9 +9,9 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -35,31 +35,39 @@ def __init__(self, client, config, serializer, deserializer): self.config = config - def get( - self, resource_group_name, account_name, storage_account_name, custom_headers=None, raw=False, **operation_config): - """Gets the specified Azure Storage account linked to the given Data Lake - Analytics account. 
+ def add( + self, resource_group_name, account_name, storage_account_name, access_key, suffix=None, custom_headers=None, raw=False, **operation_config): + """Updates the specified Data Lake Analytics account to add an Azure + Storage account. :param resource_group_name: The name of the Azure resource group that contains the Data Lake Analytics account. :type resource_group_name: str - :param account_name: The name of the Data Lake Analytics account from - which to retrieve Azure storage account details. + :param account_name: The name of the Data Lake Analytics account to + which to add the Azure Storage account. :type account_name: str - :param storage_account_name: The name of the Azure Storage account for - which to retrieve the details. + :param storage_account_name: The name of the Azure Storage account to + add :type storage_account_name: str + :param access_key: the access key associated with this Azure Storage + account that will be used to connect to it. + :type access_key: str + :param suffix: the optional suffix for the storage account. + :type suffix: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: :class:`StorageAccountInfo - ` - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ + parameters = models.AddStorageAccountParameters(access_key=access_key, suffix=suffix) + # Construct URL url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}' path_format_arguments = { @@ -84,50 +92,57 @@ def get( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + # Construct body + body_content = self._serialize.body(parameters, 'AddStorageAccountParameters') + # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send(request, header_parameters, **operation_config) + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('StorageAccountInfo', response) - if raw: - client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response = ClientRawResponse(None, response) return client_raw_response - return deserialized - - def delete( - self, resource_group_name, account_name, storage_account_name, custom_headers=None, raw=False, **operation_config): - """Updates the specified Data Lake Analytics account to remove an Azure - Storage account. 
+ def update( + self, resource_group_name, account_name, storage_account_name, access_key=None, suffix=None, custom_headers=None, raw=False, **operation_config): + """Updates the Data Lake Analytics account to replace Azure Storage blob + account details, such as the access key and/or suffix. :param resource_group_name: The name of the Azure resource group that contains the Data Lake Analytics account. :type resource_group_name: str - :param account_name: The name of the Data Lake Analytics account from - which to remove the Azure Storage account. + :param account_name: The name of the Data Lake Analytics account to + modify storage accounts in :type account_name: str - :param storage_account_name: The name of the Azure Storage account to - remove + :param storage_account_name: The Azure Storage account to modify :type storage_account_name: str + :param access_key: the updated access key associated with this Azure + Storage account that will be used to connect to it. + :type access_key: str + :param suffix: the optional suffix for the storage account. + :type suffix: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ + parameters = None + if access_key is not None or suffix is not None: + parameters = models.UpdateStorageAccountParameters(access_key=access_key, suffix=suffix) + # Construct URL url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}' path_format_arguments = { @@ -152,9 +167,16 @@ def delete( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'UpdateStorageAccountParameters') + else: + body_content = None + # Construct and send request - request = self._client.delete(url, query_parameters) - response = self._client.send(request, header_parameters, **operation_config) + request = self._client.patch(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -165,38 +187,32 @@ def delete( client_raw_response = ClientRawResponse(None, response) return client_raw_response - def update( - self, resource_group_name, account_name, storage_account_name, access_key=None, suffix=None, custom_headers=None, raw=False, **operation_config): - """Updates the Data Lake Analytics account to replace Azure Storage blob - account details, such as the access key and/or suffix. + def delete( + self, resource_group_name, account_name, storage_account_name, custom_headers=None, raw=False, **operation_config): + """Updates the specified Data Lake Analytics account to remove an Azure + Storage account. 
:param resource_group_name: The name of the Azure resource group that contains the Data Lake Analytics account. :type resource_group_name: str - :param account_name: The name of the Data Lake Analytics account to - modify storage accounts in + :param account_name: The name of the Data Lake Analytics account from + which to remove the Azure Storage account. :type account_name: str - :param storage_account_name: The Azure Storage account to modify + :param storage_account_name: The name of the Azure Storage account to + remove :type storage_account_name: str - :param access_key: the updated access key associated with this Azure - Storage account that will be used to connect to it. - :type access_key: str - :param suffix: the optional suffix for the storage account. - :type suffix: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ - parameters = None - if access_key is not None or suffix is not None: - parameters = models.UpdateStorageAccountParameters(access_key=access_key, suffix=suffix) - # Construct URL url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}' path_format_arguments = { @@ -221,16 +237,9 @@ def update( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - # Construct body - if parameters is not None: - body_content = self._serialize.body(parameters, 'UpdateStorageAccountParameters') - else: - body_content = None - # Construct and send request - request = self._client.patch(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, **operation_config) + request = self._client.delete(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -241,37 +250,34 @@ def update( client_raw_response = ClientRawResponse(None, response) return client_raw_response - def add( - self, resource_group_name, account_name, storage_account_name, access_key, suffix=None, custom_headers=None, raw=False, **operation_config): - """Updates the specified Data Lake Analytics account to add an Azure - Storage account. + def get( + self, resource_group_name, account_name, storage_account_name, custom_headers=None, raw=False, **operation_config): + """Gets the specified Azure Storage account linked to the given Data Lake + Analytics account. 
:param resource_group_name: The name of the Azure resource group that contains the Data Lake Analytics account. :type resource_group_name: str - :param account_name: The name of the Data Lake Analytics account to - which to add the Azure Storage account. + :param account_name: The name of the Data Lake Analytics account from + which to retrieve Azure storage account details. :type account_name: str - :param storage_account_name: The name of the Azure Storage account to - add + :param storage_account_name: The name of the Azure Storage account for + which to retrieve the details. :type storage_account_name: str - :param access_key: the access key associated with this Azure Storage - account that will be used to connect to it. - :type access_key: str - :param suffix: the optional suffix for the storage account. - :type suffix: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: :class:`StorageAccountInfo + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`StorageAccountInfo + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ - parameters = models.AddStorageAccountParameters(access_key=access_key, suffix=suffix) - # Construct URL url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/StorageAccounts/{storageAccountName}' path_format_arguments = { @@ -296,23 +302,26 @@ def add( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - # Construct body - body_content = self._serialize.body(parameters, 'AddStorageAccountParameters') - # Construct and send request - request = self._client.put(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('StorageAccountInfo', response) + if raw: - client_raw_response = ClientRawResponse(None, response) + client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response + return deserialized + def get_storage_container( self, resource_group_name, account_name, storage_account_name, container_name, custom_headers=None, raw=False, **operation_config): """Gets the specified Azure Storage container associated with the given @@ -335,10 +344,13 @@ def get_storage_container( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: :class:`StorageContainer + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`StorageContainer - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -406,6 +418,8 @@ def list_storage_containers( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`StorageContainer + ` :rtype: :class:`StorageContainerPaged ` :raises: :class:`CloudError` @@ -485,6 +499,8 @@ def list_sas_tokens( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`SasTokenInfo + ` :rtype: :class:`SasTokenInfoPaged ` :raises: :class:`CloudError` @@ -580,6 +596,8 @@ def list_by_account( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`StorageAccountInfo + ` :rtype: :class:`StorageAccountInfoPaged ` :raises: :class:`CloudError` diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_assembly_clr_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_assembly_clr_paged.py index 3e406d86ce07..caae702d9f0d 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_assembly_clr_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_assembly_clr_paged.py @@ -14,7 +14,7 @@ class USqlAssemblyClrPaged(Paged): """ - A paging container for iterating over a list of USqlAssemblyClr object + A paging container for iterating over a list of :class:`USqlAssemblyClr ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_credential_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_credential_paged.py index 
0d1e2a26e39a..fd7b50a9ae63 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_credential_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_credential_paged.py @@ -14,7 +14,7 @@ class USqlCredentialPaged(Paged): """ - A paging container for iterating over a list of USqlCredential object + A paging container for iterating over a list of :class:`USqlCredential ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_database_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_database_paged.py index cfe41ac64bc8..acebb8500686 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_database_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_database_paged.py @@ -14,7 +14,7 @@ class USqlDatabasePaged(Paged): """ - A paging container for iterating over a list of USqlDatabase object + A paging container for iterating over a list of :class:`USqlDatabase ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_external_data_source_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_external_data_source_paged.py index 1014de319a3c..fc5cf4ae6b1d 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_external_data_source_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_external_data_source_paged.py @@ -14,7 +14,7 @@ class USqlExternalDataSourcePaged(Paged): """ - A paging container for iterating over a list of USqlExternalDataSource object + A paging container for iterating over a list of :class:`USqlExternalDataSource ` object """ _attribute_map = { diff --git 
a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_package_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_package_paged.py index ca10b732c9c4..2d8c3977a4f2 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_package_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_package_paged.py @@ -14,7 +14,7 @@ class USqlPackagePaged(Paged): """ - A paging container for iterating over a list of USqlPackage object + A paging container for iterating over a list of :class:`USqlPackage ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_procedure_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_procedure_paged.py index ca4905b5f863..61b23707c7e1 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_procedure_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_procedure_paged.py @@ -14,7 +14,7 @@ class USqlProcedurePaged(Paged): """ - A paging container for iterating over a list of USqlProcedure object + A paging container for iterating over a list of :class:`USqlProcedure ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_schema_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_schema_paged.py index 655f5db3c90f..f8f8d2386192 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_schema_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_schema_paged.py @@ -14,7 +14,7 @@ class USqlSchemaPaged(Paged): """ - A paging container for iterating over a list of USqlSchema object + A paging container for iterating over a list of 
:class:`USqlSchema ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_paged.py index 3ab712ce04c2..b96cd3580b11 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_paged.py @@ -14,7 +14,7 @@ class USqlTablePaged(Paged): """ - A paging container for iterating over a list of USqlTable object + A paging container for iterating over a list of :class:`USqlTable ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_partition_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_partition_paged.py index a30dc4a4e595..2b0f1db6ac47 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_partition_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_partition_paged.py @@ -14,7 +14,7 @@ class USqlTablePartitionPaged(Paged): """ - A paging container for iterating over a list of USqlTablePartition object + A paging container for iterating over a list of :class:`USqlTablePartition ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_statistics_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_statistics_paged.py index c319b98e0d2d..47fa3fd26bc2 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_statistics_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_statistics_paged.py @@ -14,7 +14,7 @@ class USqlTableStatisticsPaged(Paged): """ 
- A paging container for iterating over a list of USqlTableStatistics object + A paging container for iterating over a list of :class:`USqlTableStatistics ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_type_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_type_paged.py index 4df65670effb..9cda3f159c89 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_type_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_type_paged.py @@ -14,7 +14,7 @@ class USqlTableTypePaged(Paged): """ - A paging container for iterating over a list of USqlTableType object + A paging container for iterating over a list of :class:`USqlTableType ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_valued_function_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_valued_function_paged.py index 2d01b5dbde07..36065b69b525 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_valued_function_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table_valued_function_paged.py @@ -14,7 +14,7 @@ class USqlTableValuedFunctionPaged(Paged): """ - A paging container for iterating over a list of USqlTableValuedFunction object + A paging container for iterating over a list of :class:`USqlTableValuedFunction ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_type_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_type_paged.py index 53c3edfe105d..f079ab674366 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_type_paged.py 
+++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_type_paged.py @@ -14,7 +14,7 @@ class USqlTypePaged(Paged): """ - A paging container for iterating over a list of USqlType object + A paging container for iterating over a list of :class:`USqlType ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_view_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_view_paged.py index 78c2823f5613..5536566708a4 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_view_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_view_paged.py @@ -14,7 +14,7 @@ class USqlViewPaged(Paged): """ - A paging container for iterating over a list of USqlView object + A paging container for iterating over a list of :class:`USqlView ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/operations/catalog_operations.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/operations/catalog_operations.py index 02a37632c32f..f4b47017681e 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/operations/catalog_operations.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/operations/catalog_operations.py @@ -9,9 +9,9 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -59,9 +59,11 @@ def create_secret( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ parameters = models.DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters(password=password, uri=uri) @@ -130,9 +132,11 @@ def update_secret( deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ parameters = models.DataLakeAnalyticsCatalogSecretCreateOrUpdateParameters(password=password, uri=uri) @@ -178,28 +182,29 @@ def update_secret( client_raw_response = ClientRawResponse(None, response) return client_raw_response - def get_secret( + def delete_secret( self, account_name, database_name, secret_name, custom_headers=None, raw=False, **operation_config): - """Gets the specified secret in the specified database. This is deprecated - and will be removed in the next release. Please use GetCredential - instead. + """Deletes the specified secret in the specified database. This is + deprecated and will be removed in the next release. Please use + DeleteCredential instead. :param account_name: The Azure Data Lake Analytics account upon which to execute catalog operations. :type account_name: str :param database_name: The name of the database containing the secret. :type database_name: str - :param secret_name: The name of the secret to get + :param secret_name: The name of the secret to delete :type secret_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: :class:`USqlSecret - ` - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -227,7 +232,7 @@ def get_secret( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.get(url, query_parameters) + request = self._client.delete(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: @@ -235,38 +240,35 @@ def get_secret( exp.request_id = response.headers.get('x-ms-request-id') raise exp - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('USqlSecret', response) - if raw: - client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response = ClientRawResponse(None, response) return client_raw_response - return deserialized - - def delete_secret( + def get_secret( self, account_name, database_name, secret_name, custom_headers=None, raw=False, **operation_config): - """Deletes the specified secret in the specified database. This is - deprecated and will be removed in the next release. Please use - DeleteCredential instead. + """Gets the specified secret in the specified database. This is deprecated + and will be removed in the next release. Please use GetCredential + instead. :param account_name: The Azure Data Lake Analytics account upon which to execute catalog operations. :type account_name: str :param database_name: The name of the database containing the secret. 
:type database_name: str - :param secret_name: The name of the secret to delete + :param secret_name: The name of the secret to get :type secret_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: :class:`USqlSecret + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlSecret + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -294,7 +296,7 @@ def delete_secret( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct and send request - request = self._client.delete(url, query_parameters) + request = self._client.get(url, query_parameters) response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: @@ -302,10 +304,17 @@ def delete_secret( exp.request_id = response.headers.get('x-ms-request-id') raise exp + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('USqlSecret', response) + if raw: - client_raw_response = ClientRawResponse(None, response) + client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response + return deserialized + def delete_all_secrets( self, account_name, database_name, custom_headers=None, raw=False, **operation_config): """Deletes all secrets in the specified database. This is deprecated and @@ -322,9 +331,11 @@ def delete_all_secrets( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -388,9 +399,11 @@ def create_credential( deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -457,9 +470,11 @@ def update_credential( deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -503,29 +518,43 @@ def update_credential( client_raw_response = ClientRawResponse(None, response) return client_raw_response - def get_credential( - self, account_name, database_name, credential_name, custom_headers=None, raw=False, **operation_config): - """Retrieves the specified credential from the Data Lake Analytics - catalog. + def delete_credential( + self, account_name, database_name, credential_name, cascade=False, password=None, custom_headers=None, raw=False, **operation_config): + """Deletes the specified credential in the specified database. :param account_name: The Azure Data Lake Analytics account upon which to execute catalog operations. :type account_name: str - :param database_name: The name of the database containing the schema. + :param database_name: The name of the database containing the + credential. :type database_name: str - :param credential_name: The name of the credential. 
+ :param credential_name: The name of the credential to delete :type credential_name: str + :param cascade: Indicates if the delete should be a cascading delete + (which deletes all resources dependent on the credential as well as + the credential) or not. If false will fail if there are any resources + relying on the credential. + :type cascade: bool + :param password: the current password for the credential and user with + access to the data source. This is required if the requester is not + the account owner. + :type password: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: :class:`USqlCredential - ` - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ + parameters = None + if password is not None: + parameters = models.DataLakeAnalyticsCatalogCredentialDeleteParameters(password=password) + # Construct URL url = '/catalog/usql/databases/{databaseName}/credentials/{credentialName}' path_format_arguments = { @@ -538,6 +567,8 @@ def get_credential( # Construct parameters query_parameters = {} + if cascade is not None: + query_parameters['cascade'] = self._serialize.query("cascade", cascade, 'bool') query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers @@ -550,61 +581,52 @@ def get_credential( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + # Construct body + if parameters is not None: + body_content = self._serialize.body(parameters, 'DataLakeAnalyticsCatalogCredentialDeleteParameters') + else: + body_content = None + # Construct and send request - request 
= self._client.get(url, query_parameters) - response = self._client.send(request, header_parameters, **operation_config) + request = self._client.post(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp - deserialized = None - - if response.status_code == 200: - deserialized = self._deserialize('USqlCredential', response) - if raw: - client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response = ClientRawResponse(None, response) return client_raw_response - return deserialized - - def delete_credential( - self, account_name, database_name, credential_name, cascade=False, password=None, custom_headers=None, raw=False, **operation_config): - """Deletes the specified credential in the specified database. + def get_credential( + self, account_name, database_name, credential_name, custom_headers=None, raw=False, **operation_config): + """Retrieves the specified credential from the Data Lake Analytics + catalog. :param account_name: The Azure Data Lake Analytics account upon which to execute catalog operations. :type account_name: str - :param database_name: The name of the database containing the - credential. + :param database_name: The name of the database containing the schema. :type database_name: str - :param credential_name: The name of the credential to delete + :param credential_name: The name of the credential. :type credential_name: str - :param cascade: Indicates if the delete should be a cascading delete - (which deletes all resources dependent on the credential as well as - the credential) or not. If false will fail if there are any resources - relying on the credential. - :type cascade: bool - :param password: the current password for the credential and user with - access to the data source. 
This is required if the requester is not - the account owner. - :type password: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: :class:`USqlCredential + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: :class:`USqlCredential + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ - parameters = None - if password is not None: - parameters = models.DataLakeAnalyticsCatalogCredentialDeleteParameters(password=password) - # Construct URL url = '/catalog/usql/databases/{databaseName}/credentials/{credentialName}' path_format_arguments = { @@ -617,8 +639,6 @@ def delete_credential( # Construct parameters query_parameters = {} - if cascade is not None: - query_parameters['cascade'] = self._serialize.query("cascade", cascade, 'bool') query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') # Construct headers @@ -631,26 +651,26 @@ def delete_credential( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - # Construct body - if parameters is not None: - body_content = self._serialize.body(parameters, 'DataLakeAnalyticsCatalogCredentialDeleteParameters') - else: - body_content = None - # Construct and send request - request = self._client.post(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp + deserialized = None + + 
if response.status_code == 200: + deserialized = self._deserialize('USqlCredential', response) + if raw: - client_raw_response = ClientRawResponse(None, response) + client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response + return deserialized + def list_credentials( self, account_name, database_name, filter=None, top=None, skip=None, select=None, orderby=None, count=None, custom_headers=None, raw=False, **operation_config): """Retrieves the list of credentials from the Data Lake Analytics catalog. @@ -685,6 +705,8 @@ def list_credentials( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlCredential + ` :rtype: :class:`USqlCredentialPaged ` :raises: :class:`CloudError` @@ -772,10 +794,13 @@ def get_external_data_source( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlExternalDataSource + ` + or :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlExternalDataSource ` - :rtype: :class:`ClientRawResponse` - if raw=true + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -858,6 +883,8 @@ def list_external_data_sources( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlExternalDataSource + ` :rtype: :class:`USqlExternalDataSourcePaged ` :raises: :class:`CloudError` @@ -945,10 +972,13 @@ def get_procedure( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: :class:`USqlProcedure + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlProcedure - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -1033,6 +1063,8 @@ def list_procedures( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlProcedure + ` :rtype: :class:`USqlProcedurePaged ` :raises: :class:`CloudError` @@ -1120,10 +1152,13 @@ def get_table( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlTable + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlTable - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -1213,6 +1248,8 @@ def list_tables( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlTable + ` :rtype: :class:`USqlTablePaged ` :raises: :class:`CloudError` @@ -1322,6 +1359,8 @@ def list_table_statistics_by_database_and_schema( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlTableStatistics + ` :rtype: :class:`USqlTableStatisticsPaged ` :raises: :class:`CloudError` @@ -1411,10 +1450,13 @@ def get_table_type( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlTableType + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlTableType - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -1499,6 +1541,8 @@ def list_table_types( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: An iterator like instance of :class:`USqlTableType + ` :rtype: :class:`USqlTableTypePaged ` :raises: :class:`CloudError` @@ -1586,10 +1630,13 @@ def get_package( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlPackage + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlPackage - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -1674,6 +1721,8 @@ def list_packages( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlPackage + ` :rtype: :class:`USqlPackagePaged ` :raises: :class:`CloudError` @@ -1761,10 +1810,13 @@ def get_view( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlView + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlView - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -1848,6 +1900,8 @@ def list_views( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlView + ` :rtype: :class:`USqlViewPaged ` :raises: :class:`CloudError` @@ -1939,10 +1993,13 @@ def get_table_statistic( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlTableStatistics + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlTableStatistics - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -2031,6 +2088,8 @@ def list_table_statistics( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: An iterator like instance of :class:`USqlTableStatistics + ` :rtype: :class:`USqlTableStatisticsPaged ` :raises: :class:`CloudError` @@ -2123,10 +2182,13 @@ def get_table_partition( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlTablePartition + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlTablePartition - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -2215,6 +2277,8 @@ def list_table_partitions( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlTablePartition + ` :rtype: :class:`USqlTablePartitionPaged ` :raises: :class:`CloudError` @@ -2322,6 +2386,8 @@ def list_types( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlType + ` :rtype: :class:`USqlTypePaged ` :raises: :class:`CloudError` @@ -2413,10 +2479,13 @@ def get_table_valued_function( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlTableValuedFunction + ` + or :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlTableValuedFunction ` - :rtype: :class:`ClientRawResponse` - if raw=true + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -2503,6 +2572,8 @@ def list_table_valued_functions( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlTableValuedFunction + ` :rtype: :class:`USqlTableValuedFunctionPaged ` :raises: :class:`CloudError` @@ -2589,10 +2660,13 @@ def get_assembly( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: :class:`USqlAssembly + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlAssembly - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -2674,6 +2748,8 @@ def list_assemblies( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlAssemblyClr + ` :rtype: :class:`USqlAssemblyClrPaged ` :raises: :class:`CloudError` @@ -2758,10 +2834,13 @@ def get_schema( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlSchema + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlSchema - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -2842,6 +2921,8 @@ def list_schemas( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlSchema + ` :rtype: :class:`USqlSchemaPaged ` :raises: :class:`CloudError` @@ -2946,6 +3027,8 @@ def list_table_statistics_by_database( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlTableStatistics + ` :rtype: :class:`USqlTableStatisticsPaged ` :raises: :class:`CloudError` @@ -3054,6 +3137,8 @@ def list_tables_by_database( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlTable + ` :rtype: :class:`USqlTablePaged ` :raises: :class:`CloudError` @@ -3160,6 +3245,8 @@ def list_table_valued_functions_by_database( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: An iterator like instance of :class:`USqlTableValuedFunction + ` :rtype: :class:`USqlTableValuedFunctionPaged ` :raises: :class:`CloudError` @@ -3263,6 +3350,8 @@ def list_views_by_database( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlView + ` :rtype: :class:`USqlViewPaged ` :raises: :class:`CloudError` @@ -3345,10 +3434,13 @@ def get_database( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`USqlDatabase + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`USqlDatabase - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -3426,6 +3518,8 @@ def list_databases( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`USqlDatabase + ` :rtype: :class:`USqlDatabasePaged ` :raises: :class:`CloudError` diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/__init__.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/__init__.py index 0a39ef255293..d28921edb704 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/__init__.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/__init__.py @@ -18,16 +18,22 @@ from .usql_job_properties import USqlJobProperties from .hive_job_properties import HiveJobProperties from .job_properties import JobProperties +from .create_usql_job_properties import CreateUSqlJobProperties +from .create_job_properties import CreateJobProperties from .job_inner_error import JobInnerError from .job_error_details import JobErrorDetails from .job_relationship_properties import JobRelationshipProperties from .job_pipeline_run_information import JobPipelineRunInformation from .job_pipeline_information 
import JobPipelineInformation from .job_recurrence_information import JobRecurrenceInformation +from .create_job_parameters import CreateJobParameters +from .build_job_parameters import BuildJobParameters +from .base_job_parameters import BaseJobParameters +from .job_information_basic import JobInformationBasic from .job_information import JobInformation from .job_pipeline_information_paged import JobPipelineInformationPaged from .job_recurrence_information_paged import JobRecurrenceInformationPaged -from .job_information_paged import JobInformationPaged +from .job_information_basic_paged import JobInformationBasicPaged from .data_lake_analytics_job_management_client_enums import ( JobResourceType, SeverityTypes, @@ -47,16 +53,22 @@ 'USqlJobProperties', 'HiveJobProperties', 'JobProperties', + 'CreateUSqlJobProperties', + 'CreateJobProperties', 'JobInnerError', 'JobErrorDetails', 'JobRelationshipProperties', 'JobPipelineRunInformation', 'JobPipelineInformation', 'JobRecurrenceInformation', + 'CreateJobParameters', + 'BuildJobParameters', + 'BaseJobParameters', + 'JobInformationBasic', 'JobInformation', 'JobPipelineInformationPaged', 'JobRecurrenceInformationPaged', - 'JobInformationPaged', + 'JobInformationBasicPaged', 'JobResourceType', 'SeverityTypes', 'CompileMode', diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/base_job_parameters.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/base_job_parameters.py new file mode 100644 index 000000000000..8ff42cdafe66 --- /dev/null +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/base_job_parameters.py @@ -0,0 +1,39 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class BaseJobParameters(Model): + """Data Lake Analytics Job Parameters base class for build and submit. + + :param type: the job type of the current job (Hive or USql). Possible + values include: 'USql', 'Hive' + :type type: str or :class:`JobType + ` + :param properties: the job specific properties. + :type properties: :class:`CreateJobProperties + ` + """ + + _validation = { + 'type': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'JobType'}, + 'properties': {'key': 'properties', 'type': 'CreateJobProperties'}, + } + + def __init__(self, type, properties): + self.type = type + self.properties = properties diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/build_job_parameters.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/build_job_parameters.py new file mode 100644 index 000000000000..3bc9fe52b143 --- /dev/null +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/build_job_parameters.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .base_job_parameters import BaseJobParameters + + +class BuildJobParameters(BaseJobParameters): + """The parameters used to build a new Data Lake Analytics job. 
+ + :param type: the job type of the current job (Hive or USql). Possible + values include: 'USql', 'Hive' + :type type: str or :class:`JobType + ` + :param properties: the job specific properties. + :type properties: :class:`CreateJobProperties + ` + :param name: the friendly name of the job to build. + :type name: str + """ + + _validation = { + 'type': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'JobType'}, + 'properties': {'key': 'properties', 'type': 'CreateJobProperties'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, type, properties, name=None): + super(BuildJobParameters, self).__init__(type=type, properties=properties) + self.name = name diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/create_job_parameters.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/create_job_parameters.py new file mode 100644 index 000000000000..d8fb4e346f1c --- /dev/null +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/create_job_parameters.py @@ -0,0 +1,66 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .base_job_parameters import BaseJobParameters + + +class CreateJobParameters(BaseJobParameters): + """The parameters used to submit a new Data Lake Analytics job. + + :param type: the job type of the current job (Hive or USql). Possible + values include: 'USql', 'Hive' + :type type: str or :class:`JobType + ` + :param properties: the job specific properties. 
+ :type properties: :class:`CreateJobProperties + ` + :param name: the friendly name of the job to submit. + :type name: str + :param degree_of_parallelism: the degree of parallelism to use for this + job. This must be greater than 0, if set to less than 0 it will default to + 1. Default value: 1 . + :type degree_of_parallelism: int + :param priority: the priority value to use for the current job. Lower + numbers have a higher priority. By default, a job has a priority of 1000. + This must be greater than 0. + :type priority: int + :param log_file_patterns: the list of log file name patterns to find in + the logFolder. '*' is the only matching character allowed. Example format: + jobExecution*.log or *mylog*.txt + :type log_file_patterns: list of str + :param related: the recurring job relationship information properties. + :type related: :class:`JobRelationshipProperties + ` + """ + + _validation = { + 'type': {'required': True}, + 'properties': {'required': True}, + 'name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'JobType'}, + 'properties': {'key': 'properties', 'type': 'CreateJobProperties'}, + 'name': {'key': 'name', 'type': 'str'}, + 'degree_of_parallelism': {'key': 'degreeOfParallelism', 'type': 'int'}, + 'priority': {'key': 'priority', 'type': 'int'}, + 'log_file_patterns': {'key': 'logFilePatterns', 'type': '[str]'}, + 'related': {'key': 'related', 'type': 'JobRelationshipProperties'}, + } + + def __init__(self, type, properties, name, degree_of_parallelism=1, priority=None, log_file_patterns=None, related=None): + super(CreateJobParameters, self).__init__(type=type, properties=properties) + self.name = name + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.log_file_patterns = log_file_patterns + self.related = related diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/create_job_properties.py 
b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/create_job_properties.py new file mode 100644 index 000000000000..c4599162449f --- /dev/null +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/create_job_properties.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class CreateJobProperties(Model): + """The common Data Lake Analytics job properties for job submission. + + :param runtime_version: the runtime version of the Data Lake Analytics + engine to use for the specific type of job being run. 
+ :type runtime_version: str + :param script: the script to run + :type script: str + :param type: Polymorphic Discriminator + :type type: str + """ + + _validation = { + 'script': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + 'script': {'key': 'script', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'USql': 'CreateUSqlJobProperties'} + } + + def __init__(self, script, runtime_version=None): + self.runtime_version = runtime_version + self.script = script + self.type = None diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/create_usql_job_properties.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/create_usql_job_properties.py new file mode 100644 index 000000000000..e85a3bcfa205 --- /dev/null +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/create_usql_job_properties.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .create_job_properties import CreateJobProperties + + +class CreateUSqlJobProperties(CreateJobProperties): + """U-SQL job properties used when submitting U-SQL jobs. + + :param runtime_version: the runtime version of the Data Lake Analytics + engine to use for the specific type of job being run. 
+ :type runtime_version: str + :param script: the script to run + :type script: str + :param type: Polymorphic Discriminator + :type type: str + :param compile_mode: Optionally enforces a specific compilation mode for + the job during execution. If this is not specified during submission, the + server will determine the optimal compilation mode. Possible values + include: 'Semantic', 'Full', 'SingleBox' + :type compile_mode: str or :class:`CompileMode + ` + """ + + _validation = { + 'script': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + 'script': {'key': 'script', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compile_mode': {'key': 'compileMode', 'type': 'CompileMode'}, + } + + def __init__(self, script, runtime_version=None, compile_mode=None): + super(CreateUSqlJobProperties, self).__init__(runtime_version=runtime_version, script=script) + self.compile_mode = compile_mode + self.type = 'USql' diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/hive_job_properties.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/hive_job_properties.py index 2fbbe4e2d2ec..74df4f70dd04 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/hive_job_properties.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/hive_job_properties.py @@ -13,7 +13,7 @@ class HiveJobProperties(JobProperties): - """Hive job properties used when submitting and retrieving Hive jobs. + """Hive job properties used when retrieving Hive jobs. Variables are only populated by the server, and will be ignored when sending a request. 
diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information.py index 5afccf3d31e8..b4c54c483e39 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information.py @@ -9,11 +9,12 @@ # regenerated. # -------------------------------------------------------------------------- -from msrest.serialization import Model +from .job_information_basic import JobInformationBasic -class JobInformation(Model): - """The common Data Lake Analytics job information properties. +class JobInformation(JobInformationBasic): + """The extended Data Lake Analytics job information properties returned when + retrieving a specific job. Variables are only populated by the server, and will be ignored when sending a request. @@ -28,10 +29,6 @@ class JobInformation(Model): ` :ivar submitter: the user or account that submitted the job. :vartype submitter: str - :ivar error_message: the error message details for the job, if the job - failed. - :vartype error_message: list of :class:`JobErrorDetails - ` :param degree_of_parallelism: the degree of parallelism used for this job. This must be greater than 0, if set to less than 0 it will default to 1. Default value: 1 . @@ -64,6 +61,13 @@ class JobInformation(Model): the logFolder. '*' is the only matching character allowed. Example format: jobExecution*.log or *mylog*.txt :type log_file_patterns: list of str + :param related: the recurring job relationship information properties. + :type related: :class:`JobRelationshipProperties + ` + :ivar error_message: the error message details for the job, if the job + failed. 
+ :vartype error_message: list of :class:`JobErrorDetails + ` :ivar state_audit_records: the job state audit records, indicating when various operations have been performed on this job. :vartype state_audit_records: list of :class:`JobStateAuditRecord @@ -71,9 +75,6 @@ class JobInformation(Model): :param properties: the job specific properties. :type properties: :class:`JobProperties ` - :param related: the recurring job relationship information properties. - :type related: :class:`JobRelationshipProperties - ` """ _validation = { @@ -81,13 +82,13 @@ class JobInformation(Model): 'name': {'required': True}, 'type': {'required': True}, 'submitter': {'readonly': True}, - 'error_message': {'readonly': True}, 'submit_time': {'readonly': True}, 'start_time': {'readonly': True}, 'end_time': {'readonly': True}, 'state': {'readonly': True}, 'result': {'readonly': True}, 'log_folder': {'readonly': True}, + 'error_message': {'readonly': True}, 'state_audit_records': {'readonly': True}, 'properties': {'required': True}, } @@ -97,7 +98,6 @@ class JobInformation(Model): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'JobType'}, 'submitter': {'key': 'submitter', 'type': 'str'}, - 'error_message': {'key': 'errorMessage', 'type': '[JobErrorDetails]'}, 'degree_of_parallelism': {'key': 'degreeOfParallelism', 'type': 'int'}, 'priority': {'key': 'priority', 'type': 'int'}, 'submit_time': {'key': 'submitTime', 'type': 'iso-8601'}, @@ -107,26 +107,14 @@ class JobInformation(Model): 'result': {'key': 'result', 'type': 'JobResult'}, 'log_folder': {'key': 'logFolder', 'type': 'str'}, 'log_file_patterns': {'key': 'logFilePatterns', 'type': '[str]'}, + 'related': {'key': 'related', 'type': 'JobRelationshipProperties'}, + 'error_message': {'key': 'errorMessage', 'type': '[JobErrorDetails]'}, 'state_audit_records': {'key': 'stateAuditRecords', 'type': '[JobStateAuditRecord]'}, 'properties': {'key': 'properties', 'type': 'JobProperties'}, - 'related': {'key': 'related', 
'type': 'JobRelationshipProperties'}, } def __init__(self, name, type, properties, degree_of_parallelism=1, priority=None, log_file_patterns=None, related=None): - self.job_id = None - self.name = name - self.type = type - self.submitter = None + super(JobInformation, self).__init__(name=name, type=type, degree_of_parallelism=degree_of_parallelism, priority=priority, log_file_patterns=log_file_patterns, related=related) self.error_message = None - self.degree_of_parallelism = degree_of_parallelism - self.priority = priority - self.submit_time = None - self.start_time = None - self.end_time = None - self.state = None - self.result = None - self.log_folder = None - self.log_file_patterns = log_file_patterns self.state_audit_records = None self.properties = properties - self.related = related diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information_basic.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information_basic.py new file mode 100644 index 000000000000..b20b9b653236 --- /dev/null +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information_basic.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from msrest.serialization import Model + + +class JobInformationBasic(Model): + """The common Data Lake Analytics job information properties. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar job_id: the job's unique identifier (a GUID). 
+ :vartype job_id: str + :param name: the friendly name of the job. + :type name: str + :param type: the job type of the current job (Hive or USql). Possible + values include: 'USql', 'Hive' + :type type: str or :class:`JobType + ` + :ivar submitter: the user or account that submitted the job. + :vartype submitter: str + :param degree_of_parallelism: the degree of parallelism used for this job. + This must be greater than 0, if set to less than 0 it will default to 1. + Default value: 1 . + :type degree_of_parallelism: int + :param priority: the priority value for the current job. Lower numbers + have a higher priority. By default, a job has a priority of 1000. This + must be greater than 0. + :type priority: int + :ivar submit_time: the time the job was submitted to the service. + :vartype submit_time: datetime + :ivar start_time: the start time of the job. + :vartype start_time: datetime + :ivar end_time: the completion time of the job. + :vartype end_time: datetime + :ivar state: the job state. When the job is in the Ended state, refer to + Result and ErrorMessage for details. Possible values include: 'Accepted', + 'Compiling', 'Ended', 'New', 'Queued', 'Running', 'Scheduling', + 'Starting', 'Paused', 'WaitingForCapacity' + :vartype state: str or :class:`JobState + ` + :ivar result: the result of job execution or the current result of the + running job. Possible values include: 'None', 'Succeeded', 'Cancelled', + 'Failed' + :vartype result: str or :class:`JobResult + ` + :ivar log_folder: the log folder path to use in the following format: + adl://.azuredatalakestore.net/system/jobservice/jobs/Usql/2016/03/13/17/18/5fe51957-93bc-4de0-8ddc-c5a4753b068b/logs/. + :vartype log_folder: str + :param log_file_patterns: the list of log file name patterns to find in + the logFolder. '*' is the only matching character allowed. 
Example format: + jobExecution*.log or *mylog*.txt + :type log_file_patterns: list of str + :param related: the recurring job relationship information properties. + :type related: :class:`JobRelationshipProperties + ` + """ + + _validation = { + 'job_id': {'readonly': True}, + 'name': {'required': True}, + 'type': {'required': True}, + 'submitter': {'readonly': True}, + 'submit_time': {'readonly': True}, + 'start_time': {'readonly': True}, + 'end_time': {'readonly': True}, + 'state': {'readonly': True}, + 'result': {'readonly': True}, + 'log_folder': {'readonly': True}, + } + + _attribute_map = { + 'job_id': {'key': 'jobId', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'JobType'}, + 'submitter': {'key': 'submitter', 'type': 'str'}, + 'degree_of_parallelism': {'key': 'degreeOfParallelism', 'type': 'int'}, + 'priority': {'key': 'priority', 'type': 'int'}, + 'submit_time': {'key': 'submitTime', 'type': 'iso-8601'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'state': {'key': 'state', 'type': 'JobState'}, + 'result': {'key': 'result', 'type': 'JobResult'}, + 'log_folder': {'key': 'logFolder', 'type': 'str'}, + 'log_file_patterns': {'key': 'logFilePatterns', 'type': '[str]'}, + 'related': {'key': 'related', 'type': 'JobRelationshipProperties'}, + } + + def __init__(self, name, type, degree_of_parallelism=1, priority=None, log_file_patterns=None, related=None): + self.job_id = None + self.name = name + self.type = type + self.submitter = None + self.degree_of_parallelism = degree_of_parallelism + self.priority = priority + self.submit_time = None + self.start_time = None + self.end_time = None + self.state = None + self.result = None + self.log_folder = None + self.log_file_patterns = log_file_patterns + self.related = related diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information_paged.py 
b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information_basic_paged.py similarity index 72% rename from azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information_paged.py rename to azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information_basic_paged.py index e9b1627055b8..85a36a8c5510 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information_basic_paged.py @@ -12,16 +12,16 @@ from msrest.paging import Paged -class JobInformationPaged(Paged): +class JobInformationBasicPaged(Paged): """ - A paging container for iterating over a list of JobInformation object + A paging container for iterating over a list of :class:`JobInformationBasic ` object """ _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[JobInformation]'} + 'current_page': {'key': 'value', 'type': '[JobInformationBasic]'} } def __init__(self, *args, **kwargs): - super(JobInformationPaged, self).__init__(*args, **kwargs) + super(JobInformationBasicPaged, self).__init__(*args, **kwargs) diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_pipeline_information_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_pipeline_information_paged.py index cc3fd555f68f..c33529c88e6b 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_pipeline_information_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_pipeline_information_paged.py @@ -14,7 +14,7 @@ class JobPipelineInformationPaged(Paged): """ - A paging container for iterating over a list of JobPipelineInformation object + A paging container for iterating over a list of :class:`JobPipelineInformation ` object """ _attribute_map = { 
diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_recurrence_information_paged.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_recurrence_information_paged.py index b922424ac751..ead66eb0c359 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_recurrence_information_paged.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_recurrence_information_paged.py @@ -14,7 +14,7 @@ class JobRecurrenceInformationPaged(Paged): """ - A paging container for iterating over a list of JobRecurrenceInformation object + A paging container for iterating over a list of :class:`JobRecurrenceInformation ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_state_audit_record.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_state_audit_record.py index e306ee19793e..50c488141fe7 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_state_audit_record.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_state_audit_record.py @@ -25,7 +25,7 @@ class JobStateAuditRecord(Model): :vartype time_stamp: datetime :ivar requested_by_user: the user who requests the change. :vartype requested_by_user: str - :ivar details: the details of the audit log. + :ivar details: the details of the audit log. 
:vartype details: str """ diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/usql_job_properties.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/usql_job_properties.py index 3ef4faefc708..0fc94bebc078 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/usql_job_properties.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/usql_job_properties.py @@ -13,7 +13,7 @@ class USqlJobProperties(JobProperties): - """U-SQL job properties used when submitting and retrieving U-SQL jobs. + """U-SQL job properties used when retrieving U-SQL jobs. Variables are only populated by the server, and will be ignored when sending a request. @@ -25,17 +25,17 @@ class USqlJobProperties(JobProperties): :type script: str :param type: Polymorphic Discriminator :type type: str - :param resources: the list of resources that are required by the job - :type resources: list of :class:`JobResource + :ivar resources: the list of resources that are required by the job + :vartype resources: list of :class:`JobResource ` - :param statistics: the job specific statistics. - :type statistics: :class:`JobStatistics + :ivar statistics: the job specific statistics. + :vartype statistics: :class:`JobStatistics ` - :param debug_data: the job specific debug data locations. - :type debug_data: :class:`JobDataPath + :ivar debug_data: the job specific debug data locations. + :vartype debug_data: :class:`JobDataPath ` - :param diagnostics: the diagnostics for the job. - :type diagnostics: list of :class:`Diagnostics + :ivar diagnostics: the diagnostics for the job. + :vartype diagnostics: list of :class:`Diagnostics ` :ivar algebra_file_path: the algebra file path after the job has completed :vartype algebra_file_path: str @@ -63,17 +63,21 @@ class USqlJobProperties(JobProperties): application executing the job. This value should not be set by the user and will be ignored if it is. 
:vartype yarn_application_time_stamp: long - :param compile_mode: Optionally enforces a specific compilation mode for - the job during execution. If this is not specified during submission, the - server will determine the optimal compilation mode. Possible values - include: 'Semantic', 'Full', 'SingleBox' - :type compile_mode: str or :class:`CompileMode + :ivar compile_mode: the specific compilation mode for the job used during + execution. If this is not specified during submission, the server will + determine the optimal compilation mode. Possible values include: + 'Semantic', 'Full', 'SingleBox' + :vartype compile_mode: str or :class:`CompileMode ` """ _validation = { 'script': {'required': True}, 'type': {'required': True}, + 'resources': {'readonly': True}, + 'statistics': {'readonly': True}, + 'debug_data': {'readonly': True}, + 'diagnostics': {'readonly': True}, 'algebra_file_path': {'readonly': True}, 'total_compilation_time': {'readonly': True}, 'total_pause_time': {'readonly': True}, @@ -82,6 +86,7 @@ class USqlJobProperties(JobProperties): 'root_process_node_id': {'readonly': True}, 'yarn_application_id': {'readonly': True}, 'yarn_application_time_stamp': {'readonly': True}, + 'compile_mode': {'readonly': True}, } _attribute_map = { @@ -103,12 +108,12 @@ class USqlJobProperties(JobProperties): 'compile_mode': {'key': 'compileMode', 'type': 'CompileMode'}, } - def __init__(self, script, runtime_version=None, resources=None, statistics=None, debug_data=None, diagnostics=None, compile_mode=None): + def __init__(self, script, runtime_version=None): super(USqlJobProperties, self).__init__(runtime_version=runtime_version, script=script) - self.resources = resources - self.statistics = statistics - self.debug_data = debug_data - self.diagnostics = diagnostics + self.resources = None + self.statistics = None + self.debug_data = None + self.diagnostics = None self.algebra_file_path = None self.total_compilation_time = None self.total_pause_time = None @@ -117,5 
+122,5 @@ def __init__(self, script, runtime_version=None, resources=None, statistics=None self.root_process_node_id = None self.yarn_application_id = None self.yarn_application_time_stamp = None - self.compile_mode = compile_mode + self.compile_mode = None self.type = 'USql' diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/job_operations.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/job_operations.py index 94ab0e30e164..784575a75aaf 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/job_operations.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/job_operations.py @@ -9,9 +9,9 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -49,10 +49,13 @@ def get_statistics( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`JobStatistics + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`JobStatistics - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -112,10 +115,13 @@ def get_debug_data_path( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`JobDataPath + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`JobDataPath - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -170,17 +176,20 @@ def build( job operations on. :type account_name: str :param parameters: The parameters to build a job. 
- :type parameters: :class:`JobInformation - ` + :type parameters: :class:`BuildJobParameters + ` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`JobInformation + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`JobInformation - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -206,7 +215,7 @@ def build( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct body - body_content = self._serialize.body(parameters, 'JobInformation') + body_content = self._serialize.body(parameters, 'BuildJobParameters') # Construct and send request request = self._client.post(url, query_parameters) @@ -243,9 +252,11 @@ def cancel( deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -284,24 +295,30 @@ def cancel( client_raw_response = ClientRawResponse(None, response) return client_raw_response - def get( - self, account_name, job_identity, custom_headers=None, raw=False, **operation_config): - """Gets the job information for the specified job ID. + def create( + self, account_name, job_identity, parameters, custom_headers=None, raw=False, **operation_config): + """Submits a job to the specified Data Lake Analytics account. :param account_name: The Azure Data Lake Analytics account to execute job operations on. :type account_name: str - :param job_identity: JobInfo ID. + :param job_identity: The job ID (a GUID) for the job being submitted. 
:type job_identity: str + :param parameters: The parameters to submit a job. + :type parameters: :class:`CreateJobParameters + ` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`JobInformation + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`JobInformation - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -327,9 +344,13 @@ def get( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + # Construct body + body_content = self._serialize.body(parameters, 'CreateJobParameters') + # Construct and send request - request = self._client.get(url, query_parameters) - response = self._client.send(request, header_parameters, **operation_config) + request = self._client.put(url, query_parameters) + response = self._client.send( + request, header_parameters, body_content, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -347,27 +368,27 @@ def get( return deserialized - def create( - self, account_name, job_identity, parameters, custom_headers=None, raw=False, **operation_config): - """Submits a job to the specified Data Lake Analytics account. + def get( + self, account_name, job_identity, custom_headers=None, raw=False, **operation_config): + """Gets the job information for the specified job ID. :param account_name: The Azure Data Lake Analytics account to execute job operations on. :type account_name: str - :param job_identity: The job ID (a GUID) for the job being submitted. + :param job_identity: JobInfo ID. :type job_identity: str - :param parameters: The parameters to submit a job. 
- :type parameters: :class:`JobInformation - ` :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`JobInformation + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`JobInformation - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -393,13 +414,9 @@ def create( if self.config.accept_language is not None: header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - # Construct body - body_content = self._serialize.body(parameters, 'JobInformation') - # Construct and send request - request = self._client.put(url, query_parameters) - response = self._client.send( - request, header_parameters, body_content, **operation_config) + request = self._client.get(url, query_parameters) + response = self._client.send(request, header_parameters, **operation_config) if response.status_code not in [200]: exp = CloudError(response) @@ -451,8 +468,10 @@ def list( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: :class:`JobInformationPaged - ` + :return: An iterator like instance of :class:`JobInformationBasic + ` + :rtype: :class:`JobInformationBasicPaged + ` :raises: :class:`CloudError` """ def internal_paging(next_link=None, raw=False): @@ -509,11 +528,11 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.JobInformationPaged(internal_paging, self._deserialize.dependencies) + deserialized = models.JobInformationBasicPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} - client_raw_response = models.JobInformationPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.JobInformationBasicPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/pipeline_operations.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/pipeline_operations.py index 6d06fb64d8bc..2d483b9d84c5 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/pipeline_operations.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/pipeline_operations.py @@ -9,9 +9,9 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -55,6 +55,8 @@ def list( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`JobPipelineInformation + ` :rtype: :class:`JobPipelineInformationPaged ` :raises: :class:`CloudError` @@ -136,10 +138,13 @@ def get( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: :class:`JobPipelineInformation + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`JobPipelineInformation - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/recurrence_operations.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/recurrence_operations.py index 4a881ed4d099..c9d3641f372c 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/recurrence_operations.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/operations/recurrence_operations.py @@ -9,9 +9,9 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -55,6 +55,8 @@ def list( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`JobRecurrenceInformation + ` :rtype: :class:`JobRecurrenceInformationPaged ` :raises: :class:`CloudError` @@ -136,10 +138,13 @@ def get( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: :class:`JobRecurrenceInformation
+ `
+ or :class:`ClientRawResponse` if
+ raw=true
 :rtype: :class:`JobRecurrenceInformation
 `
- :rtype: :class:`ClientRawResponse`
- if raw=true
+ or :class:`ClientRawResponse`
 :raises: :class:`CloudError`
 """
 # Construct URL
diff --git a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/version.py b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/version.py index 737d5bbd6bfb..ddb11a0fe462 100644 --- a/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/version.py +++ b/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/version.py @@ -5,4 +5,4 @@ # license information. # -------------------------------------------------------------------------- -VERSION = "0.1.6" \ No newline at end of file +VERSION = "0.2.0" \ No newline at end of file diff --git a/azure-mgmt-datalake-store/HISTORY.rst b/azure-mgmt-datalake-store/HISTORY.rst index 840b648cde4d..a0dae6ce221e 100644 --- a/azure-mgmt-datalake-store/HISTORY.rst +++ b/azure-mgmt-datalake-store/HISTORY.rst @@ -2,6 +2,31 @@ Release History =============== +0.2.0 (2017-07-25) +++++++++++++++++++ + +**Breaking change** + +* When getting a list of accounts, the object type that is returned is DataLakeStoreAccountBasic and not DataLakeStoreAccount (more information on the difference is below in the Notes section) +* Standardized the parameter name for file paths in the url (e.g.
fileDestination to path) + +**Notes** + +* When getting a list of accounts, the account information for each account now includes a strict subset of the account information that is returned when getting a single account + + * There are two ways to get a list of accounts: List and ListByResource methods + * The following fields are included in the account information when getting a list of accounts, which is less than the account information retrieved for a single account: + + * provisioningState + * state + * creationTime + * lastModifiedTime + * endpoint + +* When retrieving account information, an account id field called "accountId" is now included. + + * accountId's description: The unique identifier associated with this Data Lake Store account. + 0.1.6 (2017-06-19) ++++++++++++++++++ * Fixing a regression discovered in 0.1.5. Please update to 0.1.6 to avoid any issues caused by that regression. diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/__init__.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/__init__.py index 3207400a1bca..faf516d80a99 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/__init__.py +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/__init__.py @@ -20,22 +20,23 @@ from .update_encryption_config import UpdateEncryptionConfig from .data_lake_store_account_update_parameters import DataLakeStoreAccountUpdateParameters from .data_lake_store_account import DataLakeStoreAccount +from .data_lake_store_account_basic import DataLakeStoreAccountBasic from .error_details import ErrorDetails from .resource import Resource from .sub_resource import SubResource from .firewall_rule_paged import FirewallRulePaged from .trusted_id_provider_paged import TrustedIdProviderPaged -from .data_lake_store_account_paged import DataLakeStoreAccountPaged +from .data_lake_store_account_basic_paged import DataLakeStoreAccountBasicPaged from .data_lake_store_account_management_client_enums
import ( EncryptionConfigType, - DataLakeStoreAccountStatus, - DataLakeStoreAccountState, EncryptionState, EncryptionProvisioningState, FirewallState, TrustedIdProviderState, TierType, FirewallAllowAzureIpsState, + DataLakeStoreAccountStatus, + DataLakeStoreAccountState, ) __all__ = [ @@ -50,19 +51,20 @@ 'UpdateEncryptionConfig', 'DataLakeStoreAccountUpdateParameters', 'DataLakeStoreAccount', + 'DataLakeStoreAccountBasic', 'ErrorDetails', 'Resource', 'SubResource', 'FirewallRulePaged', 'TrustedIdProviderPaged', - 'DataLakeStoreAccountPaged', + 'DataLakeStoreAccountBasicPaged', 'EncryptionConfigType', - 'DataLakeStoreAccountStatus', - 'DataLakeStoreAccountState', 'EncryptionState', 'EncryptionProvisioningState', 'FirewallState', 'TrustedIdProviderState', 'TierType', 'FirewallAllowAzureIpsState', + 'DataLakeStoreAccountStatus', + 'DataLakeStoreAccountState', ] diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account.py index 56a7325957d6..0decb5338664 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account.py +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account.py @@ -31,18 +31,24 @@ class DataLakeStoreAccount(Resource): :param identity: The Key Vault encryption identity, if any. :type identity: :class:`EncryptionIdentity ` - :ivar provisioning_state: the status of the Data Lake Store account while - being provisioned. Possible values include: 'Failed', 'Creating', - 'Running', 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', - 'Deleted' + :ivar provisioning_state: the provisioning status of the Data Lake Store + account. 
Possible values include: 'Failed', 'Creating', 'Running', + 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', 'Deleted' :vartype provisioning_state: str or :class:`DataLakeStoreAccountStatus ` - :ivar state: the status of the Data Lake Store account after provisioning - has completed. Possible values include: 'Active', 'Suspended' + :ivar state: the state of the Data Lake Store account. Possible values + include: 'Active', 'Suspended' :vartype state: str or :class:`DataLakeStoreAccountState ` :ivar creation_time: the account creation time. :vartype creation_time: datetime + :ivar last_modified_time: the account last modified time. + :vartype last_modified_time: datetime + :ivar endpoint: the full CName endpoint for this account. + :vartype endpoint: str + :ivar account_id: The unique identifier associated with this Data Lake + Store account. + :vartype account_id: str :param encryption_state: The current state of encryption for this Data Lake store account. Possible values include: 'Enabled', 'Disabled' :type encryption_state: str or :class:`EncryptionState @@ -74,10 +80,6 @@ class DataLakeStoreAccount(Resource): associated with this Data Lake store account. :type trusted_id_providers: list of :class:`TrustedIdProvider ` - :ivar last_modified_time: the account last modified time. - :vartype last_modified_time: datetime - :ivar endpoint: the gateway host. - :vartype endpoint: str :param default_group: the default owner group for all new folders and files created in the Data Lake Store account. 
:type default_group: str @@ -108,9 +110,10 @@ class DataLakeStoreAccount(Resource): 'provisioning_state': {'readonly': True}, 'state': {'readonly': True}, 'creation_time': {'readonly': True}, - 'encryption_provisioning_state': {'readonly': True}, 'last_modified_time': {'readonly': True}, 'endpoint': {'readonly': True}, + 'account_id': {'readonly': True}, + 'encryption_provisioning_state': {'readonly': True}, 'current_tier': {'readonly': True}, } @@ -124,6 +127,9 @@ class DataLakeStoreAccount(Resource): 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'DataLakeStoreAccountStatus'}, 'state': {'key': 'properties.state', 'type': 'DataLakeStoreAccountState'}, 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'account_id': {'key': 'properties.accountId', 'type': 'str'}, 'encryption_state': {'key': 'properties.encryptionState', 'type': 'EncryptionState'}, 'encryption_provisioning_state': {'key': 'properties.encryptionProvisioningState', 'type': 'EncryptionProvisioningState'}, 'encryption_config': {'key': 'properties.encryptionConfig', 'type': 'EncryptionConfig'}, @@ -131,8 +137,6 @@ class DataLakeStoreAccount(Resource): 'firewall_rules': {'key': 'properties.firewallRules', 'type': '[FirewallRule]'}, 'trusted_id_provider_state': {'key': 'properties.trustedIdProviderState', 'type': 'TrustedIdProviderState'}, 'trusted_id_providers': {'key': 'properties.trustedIdProviders', 'type': '[TrustedIdProvider]'}, - 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, - 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, 'default_group': {'key': 'properties.defaultGroup', 'type': 'str'}, 'new_tier': {'key': 'properties.newTier', 'type': 'TierType'}, 'current_tier': {'key': 'properties.currentTier', 'type': 'TierType'}, @@ -145,6 +149,9 @@ def __init__(self, 
location, tags=None, identity=None, encryption_state=None, en self.provisioning_state = None self.state = None self.creation_time = None + self.last_modified_time = None + self.endpoint = None + self.account_id = None self.encryption_state = encryption_state self.encryption_provisioning_state = None self.encryption_config = encryption_config @@ -152,8 +159,6 @@ def __init__(self, location, tags=None, identity=None, encryption_state=None, en self.firewall_rules = firewall_rules self.trusted_id_provider_state = trusted_id_provider_state self.trusted_id_providers = trusted_id_providers - self.last_modified_time = None - self.endpoint = None self.default_group = default_group self.new_tier = new_tier self.current_tier = None diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_basic.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_basic.py new file mode 100644 index 000000000000..9e200cd83459 --- /dev/null +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_basic.py @@ -0,0 +1,85 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from .resource import Resource + + +class DataLakeStoreAccountBasic(Resource): + """Basic Data Lake Store account information, returned on list calls. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar id: Resource Id + :vartype id: str + :ivar name: Resource name + :vartype name: str + :ivar type: Resource type + :vartype type: str + :param location: Resource location + :type location: str + :param tags: Resource tags + :type tags: dict + :ivar provisioning_state: the provisioning status of the Data Lake Store + account. Possible values include: 'Failed', 'Creating', 'Running', + 'Succeeded', 'Patching', 'Suspending', 'Resuming', 'Deleting', 'Deleted' + :vartype provisioning_state: str or :class:`DataLakeStoreAccountStatus + ` + :ivar state: the state of the Data Lake Store account. Possible values + include: 'Active', 'Suspended' + :vartype state: str or :class:`DataLakeStoreAccountState + ` + :ivar creation_time: the account creation time. + :vartype creation_time: datetime + :ivar last_modified_time: the account last modified time. + :vartype last_modified_time: datetime + :ivar endpoint: the full CName endpoint for this account. + :vartype endpoint: str + :ivar account_id: The unique identifier associated with this Data Lake + Store account. 
+ :vartype account_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'state': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'last_modified_time': {'readonly': True}, + 'endpoint': {'readonly': True}, + 'account_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'DataLakeStoreAccountStatus'}, + 'state': {'key': 'properties.state', 'type': 'DataLakeStoreAccountState'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'iso-8601'}, + 'endpoint': {'key': 'properties.endpoint', 'type': 'str'}, + 'account_id': {'key': 'properties.accountId', 'type': 'str'}, + } + + def __init__(self, location, tags=None): + super(DataLakeStoreAccountBasic, self).__init__(location=location, tags=tags) + self.provisioning_state = None + self.state = None + self.creation_time = None + self.last_modified_time = None + self.endpoint = None + self.account_id = None diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_paged.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_basic_paged.py similarity index 70% rename from azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_paged.py rename to azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_basic_paged.py index 3bbaebca0ddf..61a413fcb2b3 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_paged.py +++ 
b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_basic_paged.py @@ -12,16 +12,16 @@ from msrest.paging import Paged -class DataLakeStoreAccountPaged(Paged): +class DataLakeStoreAccountBasicPaged(Paged): """ - A paging container for iterating over a list of DataLakeStoreAccount object + A paging container for iterating over a list of :class:`DataLakeStoreAccountBasic ` object """ _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[DataLakeStoreAccount]'} + 'current_page': {'key': 'value', 'type': '[DataLakeStoreAccountBasic]'} } def __init__(self, *args, **kwargs): - super(DataLakeStoreAccountPaged, self).__init__(*args, **kwargs) + super(DataLakeStoreAccountBasicPaged, self).__init__(*args, **kwargs) diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_management_client_enums.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_management_client_enums.py index 9bbd464c8919..eecabf9ea0b7 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_management_client_enums.py +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/data_lake_store_account_management_client_enums.py @@ -18,25 +18,6 @@ class EncryptionConfigType(Enum): service_managed = "ServiceManaged" -class DataLakeStoreAccountStatus(Enum): - - failed = "Failed" - creating = "Creating" - running = "Running" - succeeded = "Succeeded" - patching = "Patching" - suspending = "Suspending" - resuming = "Resuming" - deleting = "Deleting" - deleted = "Deleted" - - -class DataLakeStoreAccountState(Enum): - - active = "Active" - suspended = "Suspended" - - class EncryptionState(Enum): enabled = "Enabled" @@ -76,3 +57,22 @@ class FirewallAllowAzureIpsState(Enum): enabled = "Enabled" disabled = "Disabled" + + +class DataLakeStoreAccountStatus(Enum): + + failed = "Failed" + creating = "Creating" + 
running = "Running" + succeeded = "Succeeded" + patching = "Patching" + suspending = "Suspending" + resuming = "Resuming" + deleting = "Deleting" + deleted = "Deleted" + + +class DataLakeStoreAccountState(Enum): + + active = "Active" + suspended = "Suspended" diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/firewall_rule_paged.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/firewall_rule_paged.py index 90cd653ec5f8..3db1c14e4e67 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/firewall_rule_paged.py +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/firewall_rule_paged.py @@ -14,7 +14,7 @@ class FirewallRulePaged(Paged): """ - A paging container for iterating over a list of FirewallRule object + A paging container for iterating over a list of :class:`FirewallRule ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/trusted_id_provider_paged.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/trusted_id_provider_paged.py index 9ca036e4ad26..27ffcf3c49e5 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/trusted_id_provider_paged.py +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/models/trusted_id_provider_paged.py @@ -14,7 +14,7 @@ class TrustedIdProviderPaged(Paged): """ - A paging container for iterating over a list of TrustedIdProvider object + A paging container for iterating over a list of :class:`TrustedIdProvider ` object """ _attribute_map = { diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/account_operations.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/account_operations.py index c1204c311483..51e5791b039a 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/account_operations.py +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/account_operations.py @@ -9,10 +9,10 @@ # regenerated. 
# -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError from msrestazure.azure_operation import AzureOperationPoller -import uuid from .. import models @@ -52,12 +52,15 @@ def create( :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response - :rtype: + :return: :class:`AzureOperationPoller` instance that returns :class:`DataLakeStoreAccount - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: + :class:`AzureOperationPoller` + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -103,17 +106,17 @@ def get_long_running_status(status_link, headers=None): def get_long_running_output(response): - if response.status_code not in [201, 200]: + if response.status_code not in [200, 201]: exp = CloudError(response) exp.request_id = response.headers.get('x-ms-request-id') raise exp deserialized = None - if response.status_code == 201: - deserialized = self._deserialize('DataLakeStoreAccount', response) if response.status_code == 200: deserialized = self._deserialize('DataLakeStoreAccount', response) + if response.status_code == 201: + deserialized = self._deserialize('DataLakeStoreAccount', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) @@ -148,12 +151,15 @@ def update( :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response - :rtype: + :return: :class:`AzureOperationPoller` instance that returns :class:`DataLakeStoreAccount - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` if + raw=true + :rtype: + :class:`AzureOperationPoller` + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ # 
Construct URL @@ -240,11 +246,14 @@ def delete( :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response + :return: + :class:`AzureOperationPoller` + instance that returns None or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`AzureOperationPoller` - instance that returns None - :rtype: :class:`ClientRawResponse` - if raw=true + or :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -320,10 +329,13 @@ def get( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`DataLakeStoreAccount + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`DataLakeStoreAccount - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -385,9 +397,11 @@ def enable_key_vault( deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -459,8 +473,11 @@ def list_by_resource_group( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: :class:`DataLakeStoreAccountPaged - ` + :return: An iterator like instance of + :class:`DataLakeStoreAccountBasic + ` + :rtype: :class:`DataLakeStoreAccountBasicPaged + ` :raises: :class:`CloudError` """ def internal_paging(next_link=None, raw=False): @@ -517,11 +534,11 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.DataLakeStoreAccountPaged(internal_paging, self._deserialize.dependencies) + deserialized = models.DataLakeStoreAccountBasicPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} - client_raw_response = models.DataLakeStoreAccountPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.DataLakeStoreAccountBasicPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized @@ -556,8 +573,11 @@ def list( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: :class:`DataLakeStoreAccountPaged - ` + :return: An iterator like instance of + :class:`DataLakeStoreAccountBasic + ` + :rtype: :class:`DataLakeStoreAccountBasicPaged + ` :raises: :class:`CloudError` """ def internal_paging(next_link=None, raw=False): @@ -613,11 +633,11 @@ def internal_paging(next_link=None, raw=False): return response # Deserialize response - deserialized = models.DataLakeStoreAccountPaged(internal_paging, self._deserialize.dependencies) + deserialized = models.DataLakeStoreAccountBasicPaged(internal_paging, self._deserialize.dependencies) if raw: header_dict = {} - client_raw_response = models.DataLakeStoreAccountPaged(internal_paging, self._deserialize.dependencies, header_dict) + client_raw_response = models.DataLakeStoreAccountBasicPaged(internal_paging, self._deserialize.dependencies, header_dict) return client_raw_response return deserialized diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/firewall_rules_operations.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/firewall_rules_operations.py index 1ace03aec813..56eb105ade09 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/firewall_rules_operations.py +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/firewall_rules_operations.py @@ -9,9 +9,9 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -59,10 +59,13 @@ def create_or_update( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: :class:`FirewallRule + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`FirewallRule - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -138,10 +141,13 @@ def update( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`FirewallRule + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`FirewallRule - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ parameters = None @@ -217,9 +223,11 @@ def delete( deserialized response :param operation_config: :ref:`Operation configuration overrides`. - :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -276,10 +284,13 @@ def get( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`FirewallRule + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`FirewallRule - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -342,6 +353,8 @@ def list_by_account( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
+ :return: An iterator like instance of :class:`FirewallRule + ` :rtype: :class:`FirewallRulePaged ` :raises: :class:`CloudError` diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/trusted_id_providers_operations.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/trusted_id_providers_operations.py index b62e68174f42..52a359914005 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/trusted_id_providers_operations.py +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/operations/trusted_id_providers_operations.py @@ -9,9 +9,9 @@ # regenerated. # -------------------------------------------------------------------------- +import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError -import uuid from .. import models @@ -60,10 +60,13 @@ def create_or_update( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`TrustedIdProvider + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`TrustedIdProvider - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ parameters = models.TrustedIdProvider(name=name, id_provider=id_provider) @@ -137,10 +140,13 @@ def update( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`TrustedIdProvider + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`TrustedIdProvider - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ parameters = None @@ -217,9 +223,11 @@ def delete( deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :rtype: None - :rtype: :class:`ClientRawResponse` - if raw=true + :return: None or + :class:`ClientRawResponse` if + raw=true + :rtype: None or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -277,10 +285,13 @@ def get( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: :class:`TrustedIdProvider + ` or + :class:`ClientRawResponse` if + raw=true :rtype: :class:`TrustedIdProvider - ` - :rtype: :class:`ClientRawResponse` - if raw=true + ` or + :class:`ClientRawResponse` :raises: :class:`CloudError` """ # Construct URL @@ -343,6 +354,8 @@ def list_by_account( deserialized response :param operation_config: :ref:`Operation configuration overrides`. + :return: An iterator like instance of :class:`TrustedIdProvider + ` :rtype: :class:`TrustedIdProviderPaged ` :raises: :class:`CloudError` diff --git a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/version.py b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/version.py index 9a6b4374370f..9bd1dfac7ecb 100644 --- a/azure-mgmt-datalake-store/azure/mgmt/datalake/store/version.py +++ b/azure-mgmt-datalake-store/azure/mgmt/datalake/store/version.py @@ -9,5 +9,5 @@ # regenerated. 
# -------------------------------------------------------------------------- -VERSION = "0.1.6" +VERSION = "0.2.0" diff --git a/azure-mgmt/tests/test_mgmt_datalake_analytics.py b/azure-mgmt/tests/test_mgmt_datalake_analytics.py index 3e925f866805..430177c0c4dc 100644 --- a/azure-mgmt/tests/test_mgmt_datalake_analytics.py +++ b/azure-mgmt/tests/test_mgmt_datalake_analytics.py @@ -229,11 +229,11 @@ def run_prereqs(self, create_job_acct = False, create_catalog = False): def run_job_to_completion(self, adla_account_name, job_id, script_to_run, job_params=None): if not job_params: - job_params = azure.mgmt.datalake.analytics.job.models.JobInformation( + job_params = azure.mgmt.datalake.analytics.job.models.CreateJobParameters( name = self.get_resource_name('testjob'), type = azure.mgmt.datalake.analytics.job.models.JobType.usql, degree_of_parallelism = 2, - properties = azure.mgmt.datalake.analytics.job.models.USqlJobProperties( + properties = azure.mgmt.datalake.analytics.job.models.CreateUSqlJobProperties( script = script_to_run, ) ) @@ -267,11 +267,11 @@ def run_job_to_completion(self, adla_account_name, job_id, script_to_run, job_pa def test_adla_jobs(self): self.run_prereqs(create_job_acct= True, create_catalog = False) # define some static GUIDs - job_to_submit = azure.mgmt.datalake.analytics.job.models.JobInformation( + job_to_submit = azure.mgmt.datalake.analytics.job.models.CreateJobParameters( name = 'azure python sdk job test', degree_of_parallelism = 2, type = azure.mgmt.datalake.analytics.job.models.JobType.usql, - properties = azure.mgmt.datalake.analytics.job.models.USqlJobProperties( + properties = azure.mgmt.datalake.analytics.job.models.CreateUSqlJobProperties( script = 'DROP DATABASE IF EXISTS testdb; CREATE DATABASE testdb;' ), related = azure.mgmt.datalake.analytics.job.models.JobRelationshipProperties( @@ -352,13 +352,22 @@ def test_adla_jobs(self): pipeline_list = list(pipeline_list) self.assertEqual(1, len(pipeline_list)) + # create a job to 
build + job_to_build = azure.mgmt.datalake.analytics.job.models.BuildJobParameters( + name = 'azure python sdk job test', + type = azure.mgmt.datalake.analytics.job.models.JobType.usql, + properties = azure.mgmt.datalake.analytics.job.models.CreateUSqlJobProperties( + script = 'DROP DATABASE IF EXISTS testdb; CREATE DATABASE testdb;' + ) + ) + # compile a job - compile_response = self.adla_job_client.job.build(self.job_account_name, job_to_submit) + compile_response = self.adla_job_client.job.build(self.job_account_name, job_to_build) self.assertIsNotNone(compile_response) # now compile a broken job and validate diagnostics. - job_to_submit.properties.script = 'DROP DATABASE IF EXIST FOO; CREATE DATABASE FOO;' - compile_response = self.adla_job_client.job.build(self.job_account_name, job_to_submit) + job_to_build.properties.script = 'DROP DATABASE IF EXIST FOO; CREATE DATABASE FOO;' + compile_response = self.adla_job_client.job.build(self.job_account_name, job_to_build) self.assertIsNotNone(compile_response) self.assertEqual(1, len(list(compile_response.properties.diagnostics))) diff --git a/requirements.txt b/requirements.txt index e026fe3f4cc6..75e19d043740 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ futures;python_version<="2.7" python-dateutil pyopenssl -msrestazure>=0.4.0,<0.5.0 +msrestazure>=0.4.0,<0.5.0 \ No newline at end of file diff --git a/swagger_to_sdk_config.json b/swagger_to_sdk_config.json index c994e6fabc0a..84188b5103ed 100644 --- a/swagger_to_sdk_config.json +++ b/swagger_to_sdk_config.json @@ -584,7 +584,7 @@ }, "datalake.analytics.catalog": { "autorest_options": { - "input-file": "specification/datalake-analytics/resource-manager/Microsoft.DataLakeAnalytics/2016-11-01/catalog.json", + "input-file": "specification/datalake-analytics/data-plane/Microsoft.DataLakeAnalytics/2016-11-01/catalog.json", "namespace": "azure.mgmt.datalake.analytics.catalog", "package-version": "0.1.6" }, @@ -593,7 +593,7 @@ }, 
"datalake.analytics.job": { "autorest_options": { - "input-file": "specification/datalake-analytics/resource-manager/Microsoft.DataLakeAnalytics/2016-11-01/job.json", + "input-file": "specification/datalake-analytics/data-plane/Microsoft.DataLakeAnalytics/2016-11-01/job.json", "namespace": "azure.mgmt.datalake.analytics.job", "package-version": "0.1.6" },