[AutoPR] datafactory/resource-manager #2011
Merged · 3 commits · Mar 22, 2018
16 changes: 14 additions & 2 deletions azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -26,9 +26,9 @@
from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse
from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest
from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest
from .parameter_specification import ParameterSpecification
from .linked_service import LinkedService
from .linked_service_resource import LinkedServiceResource
from .parameter_specification import ParameterSpecification
from .dataset import Dataset
from .dataset_resource import DatasetResource
from .activity_dependency import ActivityDependency
@@ -57,8 +57,10 @@
from .operation_service_specification import OperationServiceSpecification
from .operation import Operation
from .operation_list_response import OperationListResponse
from .azure_databricks_linked_service import AzureDatabricksLinkedService
from .azure_data_lake_analytics_linked_service import AzureDataLakeAnalyticsLinkedService
from .hd_insight_on_demand_linked_service import HDInsightOnDemandLinkedService
from .salesforce_marketing_cloud_linked_service import SalesforceMarketingCloudLinkedService
from .netezza_linked_service import NetezzaLinkedService
from .vertica_linked_service import VerticaLinkedService
from .zoho_linked_service import ZohoLinkedService
@@ -128,6 +130,7 @@
from .sql_server_linked_service import SqlServerLinkedService
from .azure_sql_dw_linked_service import AzureSqlDWLinkedService
from .azure_storage_linked_service import AzureStorageLinkedService
from .salesforce_marketing_cloud_object_dataset import SalesforceMarketingCloudObjectDataset
from .vertica_table_dataset import VerticaTableDataset
from .netezza_table_dataset import NetezzaTableDataset
from .zoho_object_dataset import ZohoObjectDataset
@@ -199,6 +202,7 @@
from .schedule_trigger import ScheduleTrigger
from .multiple_pipeline_trigger import MultiplePipelineTrigger
from .activity_policy import ActivityPolicy
from .databricks_notebook_activity import DatabricksNotebookActivity
from .data_lake_analytics_usql_activity import DataLakeAnalyticsUSQLActivity
from .azure_ml_update_resource_activity import AzureMLUpdateResourceActivity
from .azure_ml_web_service_file import AzureMLWebServiceFile
@@ -208,6 +212,7 @@
from .web_activity import WebActivity
from .redshift_unload_settings import RedshiftUnloadSettings
from .amazon_redshift_source import AmazonRedshiftSource
from .salesforce_marketing_cloud_source import SalesforceMarketingCloudSource
from .vertica_source import VerticaSource
from .netezza_source import NetezzaSource
from .zoho_source import ZohoSource
@@ -291,6 +296,7 @@
from .copy_sink import CopySink
from .copy_activity import CopyActivity
from .execution_activity import ExecutionActivity
from .filter_activity import FilterActivity
from .until_activity import UntilActivity
from .wait_activity import WaitActivity
from .for_each_activity import ForEachActivity
@@ -407,9 +413,9 @@
'IntegrationRuntimeStatusListResponse',
'UpdateIntegrationRuntimeRequest',
'UpdateIntegrationRuntimeNodeRequest',
'ParameterSpecification',
'LinkedService',
'LinkedServiceResource',
'ParameterSpecification',
'Dataset',
'DatasetResource',
'ActivityDependency',
@@ -438,8 +444,10 @@
'OperationServiceSpecification',
'Operation',
'OperationListResponse',
'AzureDatabricksLinkedService',
'AzureDataLakeAnalyticsLinkedService',
'HDInsightOnDemandLinkedService',
'SalesforceMarketingCloudLinkedService',
'NetezzaLinkedService',
'VerticaLinkedService',
'ZohoLinkedService',
@@ -509,6 +517,7 @@
'SqlServerLinkedService',
'AzureSqlDWLinkedService',
'AzureStorageLinkedService',
'SalesforceMarketingCloudObjectDataset',
'VerticaTableDataset',
'NetezzaTableDataset',
'ZohoObjectDataset',
@@ -580,6 +589,7 @@
'ScheduleTrigger',
'MultiplePipelineTrigger',
'ActivityPolicy',
'DatabricksNotebookActivity',
'DataLakeAnalyticsUSQLActivity',
'AzureMLUpdateResourceActivity',
'AzureMLWebServiceFile',
@@ -589,6 +599,7 @@
'WebActivity',
'RedshiftUnloadSettings',
'AmazonRedshiftSource',
'SalesforceMarketingCloudSource',
'VerticaSource',
'NetezzaSource',
'ZohoSource',
@@ -672,6 +683,7 @@
'CopySink',
'CopyActivity',
'ExecutionActivity',
'FilterActivity',
'UntilActivity',
'WaitActivity',
'ForEachActivity',
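For reference, the models wired into `__init__.py` above become importable from the package namespace once this PR ships; a minimal sketch, with names taken directly from this diff:

```python
# Assumes a build of azure-mgmt-datafactory that includes this PR.
from azure.mgmt.datafactory.models import (
    AzureDatabricksLinkedService,
    DatabricksNotebookActivity,
    FilterActivity,
    SalesforceMarketingCloudLinkedService,
    SalesforceMarketingCloudObjectDataset,
    SalesforceMarketingCloudSource,
)
```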
azure-mgmt-datafactory/azure/mgmt/datafactory/models/activity_policy.py
@@ -28,6 +28,9 @@ class ActivityPolicy(Model):
:param retry_interval_in_seconds: Interval between each retry attempt (in
seconds). The default is 30 sec.
:type retry_interval_in_seconds: int
:param secure_output: When set to true, Output from activity is considered
as secure and will not be logged to monitoring.
:type secure_output: bool
"""

_validation = {
@@ -39,11 +42,13 @@ class ActivityPolicy(Model):
'timeout': {'key': 'timeout', 'type': 'object'},
'retry': {'key': 'retry', 'type': 'object'},
'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'},
'secure_output': {'key': 'secureOutput', 'type': 'bool'},
}

def __init__(self, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds=None):
def __init__(self, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds=None, secure_output=None):
super(ActivityPolicy, self).__init__()
self.additional_properties = additional_properties
self.timeout = timeout
self.retry = retry
self.retry_interval_in_seconds = retry_interval_in_seconds
self.secure_output = secure_output
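A minimal sketch of the new `secure_output` flag in use; the constructor signature is the one shown above, and the timeout/retry values are placeholders:

```python
from azure.mgmt.datafactory.models import ActivityPolicy

# Placeholder policy values; only secure_output is new in this PR.
policy = ActivityPolicy(
    timeout='0.00:10:00',        # timeout/retry are loosely typed ('object') in this model
    retry=3,
    retry_interval_in_seconds=30,
    secure_output=True,          # activity output is treated as secure and not logged to monitoring
)
```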
azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_linked_service.py
@@ -23,6 +23,12 @@ class AmazonMWSLinkedService(LinkedService):
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param endpoint: The endpoint of the Amazon MWS server, (i.e.
@@ -68,6 +74,8 @@ class AmazonMWSLinkedService(LinkedService):
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'},
@@ -81,8 +89,8 @@ class AmazonMWSLinkedService(LinkedService):
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None):
super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
def __init__(self, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None):
super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.endpoint = endpoint
self.marketplace_id = marketplace_id
self.seller_id = seller_id
azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_mws_object_dataset.py
@@ -29,6 +29,9 @@ class AmazonMWSObjectDataset(Dataset):
:param parameters: Parameters for dataset.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
"""
@@ -38,6 +41,6 @@ class AmazonMWSObjectDataset(Dataset):
'type': {'required': True},
}

def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None):
super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters)
def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None):
super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
self.type = 'AmazonMWSObject'
azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_redshift_linked_service.py
@@ -23,6 +23,12 @@ class AmazonRedshiftLinkedService(LinkedService):
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param server: The name of the Amazon Redshift server. Type: string (or
@@ -56,6 +62,8 @@ class AmazonRedshiftLinkedService(LinkedService):
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'server': {'key': 'typeProperties.server', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
@@ -65,8 +73,8 @@ class AmazonRedshiftLinkedService(LinkedService):
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, username=None, password=None, port=None, encrypted_credential=None):
super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None):
super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.server = server
self.username = username
self.password = password
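Every linked-service model in this PR gains the same `parameters` and `annotations` keyword arguments. A hedged sketch with `AmazonRedshiftLinkedService`: the connection values are placeholders, and the `ParameterSpecification`/`SecureString` constructors are assumed from the existing models in this package rather than shown in this diff.

```python
from azure.mgmt.datafactory.models import (
    AmazonRedshiftLinkedService,
    ParameterSpecification,
    SecureString,
)

# Placeholder connection details; parameters/annotations are the fields added by this PR.
redshift_ls = AmazonRedshiftLinkedService(
    server='example.redshift.amazonaws.com',
    database='dev',
    username='admin',
    password=SecureString(value='<secret>'),  # assumed SecretBase-typed password
    parameters={'dbName': ParameterSpecification(type='String', default_value='dev')},
    annotations=['sample', 'redshift'],
)
```

The MWS, S3, and Azure Batch linked services in this diff take the same two keyword arguments.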
azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_dataset.py
@@ -29,6 +29,9 @@ class AmazonS3Dataset(Dataset):
:param parameters: Parameters for dataset.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param bucket_name: The name of the Amazon S3 bucket. Type: string (or
@@ -62,6 +65,7 @@
'structure': {'key': 'structure', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'},
'key': {'key': 'typeProperties.key', 'type': 'object'},
@@ -71,8 +75,8 @@
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
}

def __init__(self, linked_service_name, bucket_name, additional_properties=None, description=None, structure=None, parameters=None, key=None, prefix=None, version=None, format=None, compression=None):
super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters)
def __init__(self, linked_service_name, bucket_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None, key=None, prefix=None, version=None, format=None, compression=None):
super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
self.bucket_name = bucket_name
self.key = key
self.prefix = prefix
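Dataset models pick up the matching `annotations` field. A sketch with `AmazonS3Dataset`: the bucket, key, and reference name are placeholders, and `LinkedServiceReference(reference_name=...)` is assumed from the existing models in this package.

```python
from azure.mgmt.datafactory.models import (
    AmazonS3Dataset,
    LinkedServiceReference,
    ParameterSpecification,
)

# Placeholder names; annotations is the field added to datasets by this PR.
s3_dataset = AmazonS3Dataset(
    linked_service_name=LinkedServiceReference(reference_name='AmazonS3LinkedService1'),
    bucket_name='example-bucket',
    key='raw/input.csv',
    parameters={'folder': ParameterSpecification(type='String')},
    annotations=['sample'],
)
```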
azure-mgmt-datafactory/azure/mgmt/datafactory/models/amazon_s3_linked_service.py
@@ -23,6 +23,12 @@ class AmazonS3LinkedService(LinkedService):
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param access_key_id: The access key identifier of the Amazon S3 Identity
@@ -46,14 +52,16 @@
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'},
'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, additional_properties=None, connect_via=None, description=None, access_key_id=None, secret_access_key=None, encrypted_credential=None):
super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, encrypted_credential=None):
super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.access_key_id = access_key_id
self.secret_access_key = secret_access_key
self.encrypted_credential = encrypted_credential
azure-mgmt-datafactory/azure/mgmt/datafactory/models/azure_batch_linked_service.py
@@ -23,6 +23,12 @@ class AzureBatchLinkedService(LinkedService):
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param account_name: The Azure Batch account name. Type: string (or
@@ -57,6 +63,8 @@ class AzureBatchLinkedService(LinkedService):
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'account_name': {'key': 'typeProperties.accountName', 'type': 'object'},
'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'},
@@ -66,8 +74,8 @@
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description=None, access_key=None, encrypted_credential=None):
super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
def __init__(self, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None):
super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.account_name = account_name
self.access_key = access_key
self.batch_uri = batch_uri