diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index ea93269acef1..d8e0b105113f 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -57,8 +57,11 @@
 from .operation_service_specification import OperationServiceSpecification
 from .operation import Operation
 from .operation_list_response import OperationListResponse
+from .azure_databricks_linked_service import AzureDatabricksLinkedService
 from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
 from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService
+from .netezza_linked_service import NetezzaLinkedService
+from .vertica_linked_service import VerticaLinkedService
 from .zoho_linked_service import ZohoLinkedService
 from .xero_linked_service import XeroLinkedService
 from .square_linked_service import SquareLinkedService
@@ -126,6 +129,8 @@
 from .sql_server_linked_service import SqlServerLinkedService
 from .azure_sql_dw_linked_service import AzureSqlDWLinkedService
 from .azure_storage_linked_service import AzureStorageLinkedService
+from .vertica_table_dataset import VerticaTableDataset
+from .netezza_table_dataset import NetezzaTableDataset
 from .zoho_object_dataset import ZohoObjectDataset
 from .xero_object_dataset import XeroObjectDataset
 from .square_object_dataset import SquareObjectDataset
@@ -195,6 +200,7 @@
 from .schedule_trigger import ScheduleTrigger
 from .multiple_pipeline_trigger import MultiplePipelineTrigger
 from .activity_policy import ActivityPolicy
+from .databricks_notebook_activity import DatabricksNotebookActivity
 from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity
 from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity
 from .azure_ml_web_service_file import AzureMLWebServiceFile
@@ -204,6 +210,8 @@
 from .web_activity import WebActivity
 from .redshift_unload_settings import RedshiftUnloadSettings
 from .amazon_redshift_source import AmazonRedshiftSource
+from .vertica_source import VerticaSource
+from .netezza_source import NetezzaSource
 from .zoho_source import ZohoSource
 from .xero_source import XeroSource
 from .square_source import SquareSource
@@ -433,8 +441,11 @@
     'OperationServiceSpecification',
     'Operation',
     'OperationListResponse',
+    'AzureDatabricksLinkedService',
     'AzureDataLakeAnalyticsLinkedService',
     'HDInsightOnDemandLinkedService',
+    'NetezzaLinkedService',
+    'VerticaLinkedService',
     'ZohoLinkedService',
     'XeroLinkedService',
     'SquareLinkedService',
@@ -502,6 +513,8 @@
     'SqlServerLinkedService',
     'AzureSqlDWLinkedService',
     'AzureStorageLinkedService',
+    'VerticaTableDataset',
+    'NetezzaTableDataset',
     'ZohoObjectDataset',
     'XeroObjectDataset',
     'SquareObjectDataset',
@@ -571,6 +584,7 @@
     'ScheduleTrigger',
     'MultiplePipelineTrigger',
     'ActivityPolicy',
+    'DatabricksNotebookActivity',
     'DataLakeAnalyticsUSQLActivity',
     'AzureMLUpdateResourceActivity',
     'AzureMLWebServiceFile',
@@ -580,6 +594,8 @@
     'WebActivity',
     'RedshiftUnloadSettings',
     'AmazonRedshiftSource',
+    'VerticaSource',
+    'NetezzaSource',
     'ZohoSource',
     'XeroSource',
     'SquareSource',
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py
new file mode 100644
index 000000000000..f5a4e5850739
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_databricks_linked_service.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class AzureDatabricksLinkedService(LinkedService):
+    """Azure Databricks linked service.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param type: Constant filled by server.
+    :type type: str
+    :param domain: <REGION>.azuredatabricks.net, domain name of your
+     Databricks deployment. Type: string (or Expression with resultType
+     string).
+    :type domain: object
+    :param access_token: Access token for Databricks REST API. Refer to
+     https://docs.azuredatabricks.net/api/latest/authentication.html. Type:
+     string (or Expression with resultType string).
+    :type access_token: ~azure.mgmt.datafactory.models.SecretBase
+    :param existing_cluster_id: The id of an existing cluster that will be
+     used for all runs of this job. Type: string (or Expression with
+     resultType string).
+    :type existing_cluster_id: object
+    :param new_cluster_version: The Spark version of new cluster. Type: string
+     (or Expression with resultType string).
+    :type new_cluster_version: object
+    :param new_cluster_num_of_worker: Number of worker nodes that new cluster
+     should have. A string formatted Int32, like '1' means numOfWorker is 1 or
+     '1:10' means auto-scale from 1 as min and 10 as max. Type: string (or
+     Expression with resultType string).
+    :type new_cluster_num_of_worker: object
+    :param new_cluster_node_type: The node types of new cluster. Type: string
+     (or Expression with resultType string).
+    :type new_cluster_node_type: object
+    :param new_cluster_spark_conf: A set of optional, user-specified Spark
+     configuration key-value pairs.
+    :type new_cluster_spark_conf: dict[str, object]
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'domain': {'required': True},
+        'access_token': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'domain': {'key': 'typeProperties.domain', 'type': 'object'},
+        'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'},
+        'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'},
+        'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'},
+        'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'},
+        'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'},
+        'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, domain, access_token, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, existing_cluster_id=None, new_cluster_version=None, new_cluster_num_of_worker=None, new_cluster_node_type=None, new_cluster_spark_conf=None, encrypted_credential=None):
+        super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
+        self.domain = domain
+        self.access_token = access_token
+        self.existing_cluster_id = existing_cluster_id
+        self.new_cluster_version = new_cluster_version
+        self.new_cluster_num_of_worker = new_cluster_num_of_worker
+        self.new_cluster_node_type = new_cluster_node_type
+        self.new_cluster_spark_conf = new_cluster_spark_conf
+        self.encrypted_credential = encrypted_credential
+        self.type = 'AzureDatabricks'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
index 447e58c1d259..c808083d9330 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/copy_source.py
@@ -16,13 +16,13 @@ class CopySource(Model):
     """A copy activity source.
 
     You probably want to use the sub-classes and not this class directly. Known
-    sub-classes are: AmazonRedshiftSource, ZohoSource, XeroSource,
-    SquareSource, SparkSource, ShopifySource, ServiceNowSource,
-    QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource,
-    MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource,
-    HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource,
-    EloquaSource, DrillSource, CouchbaseSource, ConcurSource,
-    AzurePostgreSqlSource, AmazonMWSSource, HttpSource,
+    sub-classes are: AmazonRedshiftSource, VerticaSource, NetezzaSource,
+    ZohoSource, XeroSource, SquareSource, SparkSource, ShopifySource,
+    ServiceNowSource, QuickBooksSource, PrestoSource, PhoenixSource,
+    PaypalSource, MarketoSource, MariaDBSource, MagentoSource, JiraSource,
+    ImpalaSource, HubspotSource, HiveSource, HBaseSource, GreenplumSource,
+    GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource,
+    ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource,
     AzureDataLakeStoreSource, MongoDbSource, CassandraSource, WebSource,
     OracleSource, AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource,
     SqlSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource,
@@ -55,7 +55,7 @@ class CopySource(Model):
     }
 
     _subtype_map = {
-        'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'}
+        'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlSource': 'SqlSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'RelationalSource': 'RelationalSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource'}
     }
 
     def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None):
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py
new file mode 100644
index 000000000000..b23a8fc21092
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/databricks_notebook_activity.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .execution_activity import ExecutionActivity
+
+
+class DatabricksNotebookActivity(ExecutionActivity):
+    """DatabricksNotebook activity.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param name: Activity name.
+    :type name: str
+    :param description: Activity description.
+    :type description: str
+    :param depends_on: Activity depends on condition.
+    :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency]
+    :param type: Constant filled by server.
+    :type type: str
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param policy: Activity policy.
+    :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy
+    :param notebook_path: The absolute path of the notebook to be run in the
+     Databricks Workspace. This path must begin with a slash. Type: string (or
+     Expression with resultType string).
+    :type notebook_path: object
+    :param base_parameters: Base parameters to be used for each run of this
+     job. If the notebook takes a parameter that is not specified, the default
+     value from the notebook will be used.
+    :type base_parameters: dict[str, object]
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'type': {'required': True},
+        'notebook_path': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
+        'policy': {'key': 'policy', 'type': 'ActivityPolicy'},
+        'notebook_path': {'key': 'typeProperties.notebookPath', 'type': 'object'},
+        'base_parameters': {'key': 'typeProperties.baseParameters', 'type': '{object}'},
+    }
+
+    def __init__(self, name, notebook_path, additional_properties=None, description=None, depends_on=None, linked_service_name=None, policy=None, base_parameters=None):
+        super(DatabricksNotebookActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, linked_service_name=linked_service_name, policy=policy)
+        self.notebook_path = notebook_path
+        self.base_parameters = base_parameters
+        self.type = 'DatabricksNotebook'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py
index a468aca39fe2..e9014ff396b4 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dataset.py
@@ -17,7 +17,8 @@ class Dataset(Model):
     data stores, such as tables, files, folders, and documents.
 
     You probably want to use the sub-classes and not this class directly. Known
-    sub-classes are: ZohoObjectDataset, XeroObjectDataset, SquareObjectDataset,
+    sub-classes are: VerticaTableDataset, NetezzaTableDataset,
+    ZohoObjectDataset, XeroObjectDataset, SquareObjectDataset,
     SparkObjectDataset, ShopifyObjectDataset, ServiceNowObjectDataset,
     QuickBooksObjectDataset, PrestoObjectDataset, PhoenixObjectDataset,
     PaypalObjectDataset, MarketoObjectDataset, MariaDBTableDataset,
@@ -72,7 +73,7 @@ class Dataset(Model):
     }
 
     _subtype_map = {
-        'type': {'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'}
+        'type': {'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'AmazonS3Object': 'AmazonS3Dataset'}
     }
 
     def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None):
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py
index 245b5621d3ec..c65430db8229 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/execution_activity.py
@@ -16,7 +16,7 @@ class ExecutionActivity(Activity):
     """Base class for all execution activities.
 
     You probably want to use the sub-classes and not this class directly. Known
-    sub-classes are: DataLakeAnalyticsUSQLActivity,
+    sub-classes are: DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity,
     AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity,
     GetMetadataActivity, WebActivity, LookupActivity,
     SqlServerStoredProcedureActivity, CustomActivity,
@@ -58,7 +58,7 @@ class ExecutionActivity(Activity):
     }
 
     _subtype_map = {
-        'type': {'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'}
+        'type': {'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'}
     }
 
     def __init__(self, name, additional_properties=None, description=None, depends_on=None, linked_service_name=None, policy=None):
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py
index 72d64e684575..0fbef57855ef 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/google_big_query_linked_service.py
@@ -49,6 +49,12 @@ class GoogleBigQueryLinkedService(LinkedService):
     :param refresh_token: The refresh token obtained from Google for
      authorizing access to BigQuery for UserAuthentication.
     :type refresh_token: ~azure.mgmt.datafactory.models.SecretBase
+    :param client_id: The client ID of the Google application used to acquire
+     the refresh token.
+    :type client_id: ~azure.mgmt.datafactory.models.SecretBase
+    :param client_secret: The client secret of the Google application used to
+     acquire the refresh token.
+    :type client_secret: ~azure.mgmt.datafactory.models.SecretBase
     :param email: The service account email ID that is used for
      ServiceAuthentication and can only be used on self-hosted IR.
     :type email: object
@@ -89,6 +95,8 @@ class GoogleBigQueryLinkedService(LinkedService):
         'request_google_drive_scope': {'key': 'typeProperties.requestGoogleDriveScope', 'type': 'object'},
         'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
         'refresh_token': {'key': 'typeProperties.refreshToken', 'type': 'SecretBase'},
+        'client_id': {'key': 'typeProperties.clientId', 'type': 'SecretBase'},
+        'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'},
         'email': {'key': 'typeProperties.email', 'type': 'object'},
         'key_file_path': {'key': 'typeProperties.keyFilePath', 'type': 'object'},
         'trusted_cert_path': {'key': 'typeProperties.trustedCertPath', 'type': 'object'},
@@ -96,13 +104,15 @@
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }
 
-    def __init__(self, project, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None):
+    def __init__(self, project, authentication_type, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, additional_projects=None, request_google_drive_scope=None, refresh_token=None, client_id=None, client_secret=None, email=None, key_file_path=None, trusted_cert_path=None, use_system_trust_store=None, encrypted_credential=None):
         super(GoogleBigQueryLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
         self.project = project
         self.additional_projects = additional_projects
         self.request_google_drive_scope = request_google_drive_scope
         self.authentication_type = authentication_type
         self.refresh_token = refresh_token
+        self.client_id = client_id
+        self.client_secret = client_secret
         self.email = email
         self.key_file_path = key_file_path
         self.trusted_cert_path = trusted_cert_path
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py
index aee4fdf8101c..fc7eb34cb00e 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/hd_insight_on_demand_linked_service.py
@@ -121,6 +121,15 @@ class HDInsightOnDemandLinkedService(LinkedService):
      authentication. Credentials are encrypted using the integration runtime
      credential manager. Type: string (or Expression with resultType string).
     :type encrypted_credential: object
+    :param head_node_size: Specifies the size of the head node for the
+     HDInsight cluster.
+    :type head_node_size: object
+    :param data_node_size: Specifies the size of the data node for the
+     HDInsight cluster.
+    :type data_node_size: object
+    :param zookeeper_node_size: Specifies the size of the ZooKeeper node for
+     the HDInsight cluster.
+    :type zookeeper_node_size: object
     """
 
     _validation = {
@@ -168,9 +177,12 @@
         'storm_configuration': {'key': 'typeProperties.stormConfiguration', 'type': 'object'},
         'yarn_configuration': {'key': 'typeProperties.yarnConfiguration', 'type': 'object'},
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+        'head_node_size': {'key': 'typeProperties.headNodeSize', 'type': 'object'},
+        'data_node_size': {'key': 'typeProperties.dataNodeSize', 'type': 'object'},
+        'zookeeper_node_size': {'key': 'typeProperties.zookeeperNodeSize', 'type': 'object'},
     }
 
-    def __init__(self, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None):
+    def __init__(self, cluster_size, time_to_live, version, linked_service_name, host_subscription_id, tenant, cluster_resource_group, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, cluster_name_prefix=None, cluster_user_name=None, cluster_password=None, cluster_ssh_user_name=None, cluster_ssh_password=None, additional_linked_service_names=None, hcatalog_linked_service_name=None, cluster_type=None, spark_version=None, core_configuration=None, h_base_configuration=None, hdfs_configuration=None, hive_configuration=None, map_reduce_configuration=None, oozie_configuration=None, storm_configuration=None, yarn_configuration=None, encrypted_credential=None, head_node_size=None, data_node_size=None, zookeeper_node_size=None):
         super(HDInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
         self.cluster_size = cluster_size
         self.time_to_live = time_to_live
@@ -199,4 +211,7 @@ def __init__(self, cluster_size, time_to_live, version, linked_service_name, hos
         self.storm_configuration = storm_configuration
         self.yarn_configuration = yarn_configuration
         self.encrypted_credential = encrypted_credential
+        self.head_node_size = head_node_size
+        self.data_node_size = data_node_size
+        self.zookeeper_node_size = zookeeper_node_size
         self.type = 'HDInsightOnDemand'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py
index 0e6e4c7471fc..c97f56ee975f 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/linked_service.py
@@ -18,20 +18,21 @@ class LinkedService(Model):
     resource.
 
     You probably want to use the sub-classes and not this class directly. Known
-    sub-classes are: AzureDataLakeAnalyticsLinkedService,
-    HDInsightOnDemandLinkedService, ZohoLinkedService, XeroLinkedService,
-    SquareLinkedService, SparkLinkedService, ShopifyLinkedService,
-    ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService,
-    PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService,
-    MariaDBLinkedService, MagentoLinkedService, JiraLinkedService,
-    ImpalaLinkedService, HubspotLinkedService, HiveLinkedService,
-    HBaseLinkedService, GreenplumLinkedService, GoogleBigQueryLinkedService,
-    EloquaLinkedService, DrillLinkedService, CouchbaseLinkedService,
-    ConcurLinkedService, AzurePostgreSqlLinkedService, AmazonMWSLinkedService,
-    SapHanaLinkedService, SapBWLinkedService, SftpServerLinkedService,
-    FtpServerLinkedService, HttpLinkedService, AzureSearchLinkedService,
-    CustomDataSourceLinkedService, AmazonRedshiftLinkedService,
-    AmazonS3LinkedService, SapEccLinkedService,
+    sub-classes are: AzureDatabricksLinkedService,
+    AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService,
+    NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService,
+    XeroLinkedService, SquareLinkedService, SparkLinkedService,
+    ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService,
+    PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService,
+    MarketoLinkedService, MariaDBLinkedService, MagentoLinkedService,
+    JiraLinkedService, ImpalaLinkedService, HubspotLinkedService,
+    HiveLinkedService, HBaseLinkedService, GreenplumLinkedService,
+    GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService,
+    CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService,
+    AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService,
+    SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService,
+    AzureSearchLinkedService, CustomDataSourceLinkedService,
+    AmazonRedshiftLinkedService, AmazonS3LinkedService, SapEccLinkedService,
     SapCloudForCustomerLinkedService, SalesforceLinkedService,
     AzureDataLakeStoreLinkedService, MongoDbLinkedService,
     CassandraLinkedService, WebLinkedService, ODataLinkedService,
@@ -75,7 +76,7 @@ class LinkedService(Model):
     }
 
     _subtype_map = {
-        'type': {'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureStorage': 'AzureStorageLinkedService'}
+        'type': {'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'Salesforce': 'SalesforceLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureStorage': 'AzureStorageLinkedService'}
     }
 
     def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None):
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py
index 67acbc319866..642975fcf5ef 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_error.py
@@ -18,6 +18,9 @@ class ManagedIntegrationRuntimeError(Model):
     Variables are only populated by the server, and will be ignored when
     sending a request.
 
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
     :ivar time: The time when the error occurred.
     :vartype time: datetime
     :ivar code: Error code.
@@ -36,14 +39,16 @@
     }
 
     _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
         'time': {'key': 'time', 'type': 'iso-8601'},
         'code': {'key': 'code', 'type': 'str'},
         'parameters': {'key': 'parameters', 'type': '[str]'},
         'message': {'key': 'message', 'type': 'str'},
     }
 
-    def __init__(self):
+    def __init__(self, additional_properties=None):
         super(ManagedIntegrationRuntimeError, self).__init__()
+        self.additional_properties = additional_properties
         self.time = None
         self.code = None
         self.parameters = None
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py
index 4b1bf181cf47..306b51ec9e45 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_node.py
@@ -18,6 +18,9 @@ class ManagedIntegrationRuntimeNode(Model):
     Variables are only populated by the server, and will be ignored when
     sending a request.
 
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
    :ivar node_id: The managed integration runtime node id.
    :vartype node_id: str
    :ivar status: The managed integration runtime node status. Possible values
@@ -35,13 +38,15 @@
     }
 
     _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
         'node_id': {'key': 'nodeId', 'type': 'str'},
         'status': {'key': 'status', 'type': 'str'},
         'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'},
     }
 
-    def __init__(self, errors=None):
+    def __init__(self, additional_properties=None, errors=None):
         super(ManagedIntegrationRuntimeNode, self).__init__()
+        self.additional_properties = additional_properties
         self.node_id = None
         self.status = None
         self.errors = errors
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py
index eba60cf1cc4d..83dc66fbb496 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/managed_integration_runtime_operation_result.py
@@ -18,6 +18,9 @@ class ManagedIntegrationRuntimeOperationResult(Model):
     Variables are only populated by the server, and will be ignored when
     sending a request.
 
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
    :ivar type: The operation type. Could be start or stop.
    :vartype type: str
    :ivar start_time: The start time of the operation.
@@ -42,6 +45,7 @@
     }
 
     _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
         'type': {'key': 'type', 'type': 'str'},
         'start_time': {'key': 'startTime', 'type': 'iso-8601'},
         'result': {'key': 'result', 'type': 'str'},
@@ -50,8 +54,9 @@
         'activity_id': {'key': 'activityId', 'type': 'str'},
     }
 
-    def __init__(self):
+    def __init__(self, additional_properties=None):
         super(ManagedIntegrationRuntimeOperationResult, self).__init__()
+        self.additional_properties = additional_properties
         self.type = None
         self.start_time = None
         self.result = None
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py
new file mode 100644
index 000000000000..8ab0af664672
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_linked_service.py
@@ -0,0 +1,61 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class NetezzaLinkedService(LinkedService):
+    """Netezza linked service.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param type: Constant filled by server.
+    :type type: str
+    :param connection_string: An ODBC connection string.
+    :type connection_string: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, encrypted_credential=None):
+        super(NetezzaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
+        self.connection_string = connection_string
+        self.encrypted_credential = encrypted_credential
+        self.type = 'Netezza'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py
new file mode 100644
index 000000000000..cf251c7e1174
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_source.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class NetezzaSource(CopySource):
+    """A copy activity Netezza source.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param type: Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None):
+        super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait)
+        self.query = query
+        self.type = 'NetezzaSource'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py
new file mode 100644
index 000000000000..0f1564aa8530
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/netezza_table_dataset.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class NetezzaTableDataset(Dataset):
+    """Netezza dataset.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param type: Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None):
+        super(NetezzaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
+        self.type = 'NetezzaTable'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py
index 75e8ae765574..21d23ca9f9de 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py
@@ -18,6 +18,9 @@ class SelfHostedIntegrationRuntimeNode(Model):
     Variables are only populated by the server, and will be ignored when
     sending a request.
 
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
    :ivar node_name: Name of the integration runtime node.
    :vartype node_name: str
    :ivar machine_name: Machine name of the integration runtime node.
@@ -92,6 +95,7 @@
     }
 
     _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
         'node_name': {'key': 'nodeName', 'type': 'str'},
         'machine_name': {'key': 'machineName', 'type': 'str'},
         'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'},
@@ -112,8 +116,9 @@
         'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'},
     }
 
-    def __init__(self):
+    def __init__(self, additional_properties=None):
         super(SelfHostedIntegrationRuntimeNode, self).__init__()
+        self.additional_properties = additional_properties
         self.node_name = None
         self.machine_name = None
         self.host_service_uri = None
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py
new file mode 100644
index 000000000000..8310c2f1b2dc
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_linked_service.py
@@ -0,0 +1,61 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .linked_service import LinkedService
+
+
+class VerticaLinkedService(LinkedService):
+    """Vertica linked service.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param connect_via: The integration runtime reference.
+    :type connect_via:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
+    :param description: Linked service description.
+    :type description: str
+    :param parameters: Parameters for linked service.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param type: Constant filled by server.
+    :type type: str
+    :param connection_string: An ODBC connection string.
+    :type connection_string: ~azure.mgmt.datafactory.models.SecretBase
+    :param encrypted_credential: The encrypted credential used for
+     authentication. Credentials are encrypted using the integration runtime
+     credential manager. Type: string (or Expression with resultType string).
+    :type encrypted_credential: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'SecretBase'},
+        'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+    }
+
+    def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, connection_string=None, encrypted_credential=None):
+        super(VerticaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
+        self.connection_string = connection_string
+        self.encrypted_credential = encrypted_credential
+        self.type = 'Vertica'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py
new file mode 100644
index 000000000000..081b51fb0d99
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_source.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .copy_source import CopySource
+
+
+class VerticaSource(CopySource):
+    """A copy activity Vertica source.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param source_retry_count: Source retry count. Type: integer (or
+     Expression with resultType integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression
+     with resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param type: Constant filled by server.
+    :type type: str
+    :param query: A query to retrieve data from source. Type: string (or
+     Expression with resultType string).
+    :type query: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
+        'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
+        'type': {'key': 'type', 'type': 'str'},
+        'query': {'key': 'query', 'type': 'object'},
+    }
+
+    def __init__(self, additional_properties=None, source_retry_count=None, source_retry_wait=None, query=None):
+        super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait)
+        self.query = query
+        self.type = 'VerticaSource'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py
new file mode 100644
index 000000000000..04b6c209f19f
--- /dev/null
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/models/vertica_table_dataset.py
@@ -0,0 +1,46 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from .dataset import Dataset
+
+
+class VerticaTableDataset(Dataset):
+    """Vertica dataset.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param description: Dataset description.
+    :type description: str
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param linked_service_name: Linked service reference.
+    :type linked_service_name:
+     ~azure.mgmt.datafactory.models.LinkedServiceReference
+    :param parameters: Parameters for dataset.
+    :type parameters: dict[str,
+     ~azure.mgmt.datafactory.models.ParameterSpecification]
+    :param annotations: List of tags that can be used for describing the
+     Dataset.
+    :type annotations: list[object]
+    :param type: Constant filled by server.
+    :type type: str
+    """
+
+    _validation = {
+        'linked_service_name': {'required': True},
+        'type': {'required': True},
+    }
+
+    def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None):
+        super(VerticaTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
+        self.type = 'VerticaTable'
diff --git a/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py b/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py
index 266f5a486d79..a410d4adb270 100644
--- a/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py
+++ b/azure-mgmt-datafactory/azure/mgmt/datafactory/version.py
@@ -9,5 +9,5 @@
 # regenerated.
 # --------------------------------------------------------------------------
 
-VERSION = "0.5.0"
+VERSION = "2017-09-01-preview"
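Since the hunks above are generated model code, a few usage sketches may help reviewers. First, the two new Databricks types wired together — a minimal sketch, assuming a linked service deployed under the hypothetical name 'AzureDatabricksLS'; the domain, token, cluster id, and notebook path are placeholders, and `SecureString` / `LinkedServiceReference` are pre-existing models in this same package:

```python
# Minimal sketch: construct the new Databricks models added in this change.
# All concrete values (domain, token, cluster id, paths, names) are
# hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    AzureDatabricksLinkedService,
    DatabricksNotebookActivity,
    LinkedServiceReference,
    SecureString,
)

# Linked service that targets an existing interactive cluster.
databricks_ls = AzureDatabricksLinkedService(
    domain='https://westus.azuredatabricks.net',    # <REGION>.azuredatabricks.net
    access_token=SecureString('<databricks-pat>'),  # placeholder secret
    existing_cluster_id='0123-456789-cluster01',    # placeholder cluster id
)

# Activity that runs a workspace notebook on that cluster; per the
# docstring, the path must begin with a slash.
run_notebook = DatabricksNotebookActivity(
    name='RunMyNotebook',
    notebook_path='/Shared/my-notebook',
    linked_service_name=LinkedServiceReference('AzureDatabricksLS'),
    base_parameters={'input_date': '2017-09-01'},  # notebook defaults apply if omitted
)
```

Per the docstrings, omitting `existing_cluster_id` and supplying the `new_cluster_*` arguments instead should ask the service to spin up a job cluster for each run.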
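A similar sketch for the new Vertica connector, used as the source of a copy activity (Netezza is symmetric via `NetezzaLinkedService`, `NetezzaTableDataset`, and `NetezzaSource`). The connection string, dataset names, and the blob-sink pairing are hypothetical; `CopyActivity`, `BlobSink`, and `DatasetReference` are pre-existing models in this package:

```python
# Minimal sketch: copy from Vertica into blob storage with the new types.
# Connection details and reference names are hypothetical placeholders.
from azure.mgmt.datafactory.models import (
    BlobSink,
    CopyActivity,
    DatasetReference,
    LinkedServiceReference,
    SecureString,
    VerticaLinkedService,
    VerticaSource,
    VerticaTableDataset,
)

# ODBC-style connection string, carried as a SecretBase.
vertica_ls = VerticaLinkedService(
    connection_string=SecureString(
        'Server=<host>;Port=5433;Database=<db>;UID=<user>;PWD=<password>'
    ),
)

# Dataset bound to the (already deployed) linked service.
vertica_ds = VerticaTableDataset(
    linked_service_name=LinkedServiceReference('VerticaLS'),
)

# Copy activity pairing the new VerticaSource with an existing sink type.
copy_from_vertica = CopyActivity(
    name='CopyFromVertica',
    source=VerticaSource(query='SELECT * FROM public.sales'),  # placeholder query
    sink=BlobSink(),
    inputs=[DatasetReference('VerticaDS')],
    outputs=[DatasetReference('BlobDS')],
)
```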
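The `GoogleBigQueryLinkedService` change only threads two new optional secrets through the model. A sketch of user authentication with the new `client_id` / `client_secret` parameters — all credential values are placeholders, and `'UserAuthentication'` is the authentication mode the docstring associates with a refresh token:

```python
# Minimal sketch: user-authenticated BigQuery linked service with the new
# OAuth client parameters. Every credential value is a hypothetical
# placeholder.
from azure.mgmt.datafactory.models import GoogleBigQueryLinkedService, SecureString

bigquery_ls = GoogleBigQueryLinkedService(
    project='my-gcp-project',
    authentication_type='UserAuthentication',
    refresh_token=SecureString('<oauth-refresh-token>'),
    client_id=SecureString('<oauth-client-id>'),          # new in this change
    client_secret=SecureString('<oauth-client-secret>'),  # new in this change
)
```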
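Finally, the three new `HDInsightOnDemandLinkedService` node-size knobs are plain optional keywords appended to an already long constructor, so existing callers are unaffected. A sketch with hypothetical subscription, tenant, and VM-size values (the required positional arguments are unchanged by this diff):

```python
# Minimal sketch: on-demand HDInsight linked service using the new
# head/data/ZooKeeper node-size options. IDs and VM sizes are hypothetical
# placeholders.
from azure.mgmt.datafactory.models import (
    HDInsightOnDemandLinkedService,
    LinkedServiceReference,
    SecureString,
)

hdinsight_ls = HDInsightOnDemandLinkedService(
    cluster_size=4,
    time_to_live='00:15:00',                 # idle time before teardown
    version='3.6',
    linked_service_name=LinkedServiceReference('AzureStorageLS'),
    host_subscription_id='<subscription-id>',
    tenant='<tenant-id>',
    cluster_resource_group='<resource-group>',
    service_principal_id='<service-principal-id>',
    service_principal_key=SecureString('<service-principal-key>'),
    head_node_size='Standard_D12_v2',        # new in this change
    data_node_size='Standard_D4_v2',         # new in this change
    zookeeper_node_size='Standard_A2_v2',    # new in this change
)
```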