[AutoPR datafactory/resource-manager] Add new features to Swagger #1994

Merged: 5 commits, Feb 20, 2018
Changes from 3 commits
azure/mgmt/datafactory/models/__init__.py
@@ -26,9 +26,9 @@
from .integration_runtime_status_list_response import IntegrationRuntimeStatusListResponse
from .update_integration_runtime_request import UpdateIntegrationRuntimeRequest
from .update_integration_runtime_node_request import UpdateIntegrationRuntimeNodeRequest
from .parameter_specification import ParameterSpecification
from .linked_service import LinkedService
from .linked_service_resource import LinkedServiceResource
from .parameter_specification import ParameterSpecification
from .dataset import Dataset
from .dataset_resource import DatasetResource
from .activity_dependency import ActivityDependency
@@ -291,6 +291,7 @@
from .copy_sink import CopySink
from .copy_activity import CopyActivity
from .execution_activity import ExecutionActivity
from .filter_activity import FilterActivity
from .until_activity import UntilActivity
from .wait_activity import WaitActivity
from .for_each_activity import ForEachActivity
@@ -407,9 +408,9 @@
'IntegrationRuntimeStatusListResponse',
'UpdateIntegrationRuntimeRequest',
'UpdateIntegrationRuntimeNodeRequest',
'ParameterSpecification',
'LinkedService',
'LinkedServiceResource',
'ParameterSpecification',
'Dataset',
'DatasetResource',
'ActivityDependency',
Expand Down Expand Up @@ -672,6 +673,7 @@
'CopySink',
'CopyActivity',
'ExecutionActivity',
'FilterActivity',
'UntilActivity',
'WaitActivity',
'ForEachActivity',
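The first file is the generated models `__init__.py`: it re-exports the new `FilterActivity` model and moves the `ParameterSpecification` import/export after `LinkedServiceResource`. A minimal sketch, assuming the regenerated `azure-mgmt-datafactory` package is installed, that only confirms the names resolve from the public namespace:

```python
# Smoke check only: verifies the reordered and newly added exports resolve.
from azure.mgmt.datafactory.models import (
    FilterActivity,          # new export added in this change
    ParameterSpecification,  # still exported after the import reorder
    LinkedServiceResource,
)

print(FilterActivity.__name__, ParameterSpecification.__name__, LinkedServiceResource.__name__)
```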
azure/mgmt/datafactory/models/activity_policy.py
@@ -28,6 +28,9 @@ class ActivityPolicy(Model):
:param retry_interval_in_seconds: Interval between each retry attempt (in
seconds). The default is 30 sec.
:type retry_interval_in_seconds: int
:param secure_output: When set to true, Output from activity is considered
as secure and will not be logged to monitoring.
:type secure_output: bool
"""

_validation = {
@@ -39,11 +42,13 @@ class ActivityPolicy(Model):
'timeout': {'key': 'timeout', 'type': 'object'},
'retry': {'key': 'retry', 'type': 'object'},
'retry_interval_in_seconds': {'key': 'retryIntervalInSeconds', 'type': 'int'},
'secure_output': {'key': 'secureOutput', 'type': 'bool'},
}

def __init__(self, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds=None):
def __init__(self, additional_properties=None, timeout=None, retry=None, retry_interval_in_seconds=None, secure_output=None):
super(ActivityPolicy, self).__init__()
self.additional_properties = additional_properties
self.timeout = timeout
self.retry = retry
self.retry_interval_in_seconds = retry_interval_in_seconds
self.secure_output = secure_output
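`ActivityPolicy` gains a `secureOutput` switch so an activity's output can be kept out of monitoring logs. A hedged sketch using only the constructor signature shown above; the timeout and retry values are illustrative:

```python
from azure.mgmt.datafactory.models import ActivityPolicy

# timeout and retry are 'object' typed in the attribute map, so plain strings
# and ints are passed through unchanged.
policy = ActivityPolicy(
    timeout='7.00:00:00',
    retry=3,
    retry_interval_in_seconds=60,
    secure_output=True,  # output is treated as secure and not logged to monitoring
)
```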
azure/mgmt/datafactory/models/amazon_mws_linked_service.py
@@ -23,6 +23,12 @@ class AmazonMWSLinkedService(LinkedService):
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param endpoint: The endpoint of the Amazon MWS server, (i.e.
@@ -68,6 +74,8 @@
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'},
'marketplace_id': {'key': 'typeProperties.marketplaceID', 'type': 'object'},
@@ -81,8 +89,8 @@
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None):
super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
def __init__(self, endpoint, marketplace_id, seller_id, access_key_id, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, mws_auth_token=None, secret_key=None, use_encrypted_endpoints=None, use_host_verification=None, use_peer_verification=None, encrypted_credential=None):
super(AmazonMWSLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.endpoint = endpoint
self.marketplace_id = marketplace_id
self.seller_id = seller_id
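Every linked service now accepts `parameters` (a dict of `ParameterSpecification`) and free-form `annotations`, both forwarded to the `LinkedService` base class. A sketch against the widened `AmazonMWSLinkedService` constructor; the endpoint, IDs, and secrets are placeholders, and the `ParameterSpecification(type=..., default_value=...)` usage is an assumption based on its docstring type rather than anything in this diff:

```python
from azure.mgmt.datafactory.models import (
    AmazonMWSLinkedService,
    ParameterSpecification,
    SecureString,
)

linked_service = AmazonMWSLinkedService(
    endpoint='mws.amazonservices.com',    # placeholder
    marketplace_id='<marketplace-id>',    # placeholder
    seller_id='<seller-id>',              # placeholder
    access_key_id='<access-key-id>',      # placeholder
    mws_auth_token=SecureString(value='<auth-token>'),  # placeholder secret
    # New in this change:
    parameters={'env': ParameterSpecification(type='String', default_value='prod')},
    annotations=['regenerated from Swagger'],
)
```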
azure/mgmt/datafactory/models/amazon_mws_object_dataset.py
@@ -29,6 +29,9 @@ class AmazonMWSObjectDataset(Dataset):
:param parameters: Parameters for dataset.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
"""
@@ -38,6 +41,6 @@
'type': {'required': True},
}

def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None):
super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters)
def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None):
super(AmazonMWSObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
self.type = 'AmazonMWSObject'
azure/mgmt/datafactory/models/amazon_redshift_linked_service.py
@@ -23,6 +23,12 @@ class AmazonRedshiftLinkedService(LinkedService):
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param server: The name of the Amazon Redshift server. Type: string (or
@@ -56,6 +62,8 @@
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'server': {'key': 'typeProperties.server', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
@@ -65,8 +73,8 @@
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, username=None, password=None, port=None, encrypted_credential=None):
super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
def __init__(self, server, database, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, username=None, password=None, port=None, encrypted_credential=None):
super(AmazonRedshiftLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.server = server
self.username = username
self.password = password
azure/mgmt/datafactory/models/amazon_s3_dataset.py
@@ -29,6 +29,9 @@ class AmazonS3Dataset(Dataset):
:param parameters: Parameters for dataset.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param bucket_name: The name of the Amazon S3 bucket. Type: string (or
@@ -62,6 +65,7 @@
'structure': {'key': 'structure', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'},
'key': {'key': 'typeProperties.key', 'type': 'object'},
@@ -71,8 +75,8 @@
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
}

def __init__(self, linked_service_name, bucket_name, additional_properties=None, description=None, structure=None, parameters=None, key=None, prefix=None, version=None, format=None, compression=None):
super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters)
def __init__(self, linked_service_name, bucket_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None, key=None, prefix=None, version=None, format=None, compression=None):
super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
self.bucket_name = bucket_name
self.key = key
self.prefix = prefix
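Datasets get the same treatment: an `annotations` list passed through to the `Dataset` base class. A minimal sketch for the updated `AmazonS3Dataset` signature; the linked service reference name and bucket are placeholders, and `LinkedServiceReference(reference_name=...)` is assumed from the model name used in the attribute map:

```python
from azure.mgmt.datafactory.models import AmazonS3Dataset, LinkedServiceReference

s3_dataset = AmazonS3Dataset(
    linked_service_name=LinkedServiceReference(reference_name='AmazonS3LinkedService'),  # placeholder
    bucket_name='my-bucket',   # placeholder
    key='raw/events/',         # optional key prefix
    annotations=['source: s3', 'owner: data-team'],  # free-form list[object]
)
```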
azure/mgmt/datafactory/models/amazon_s3_linked_service.py
@@ -23,6 +23,12 @@ class AmazonS3LinkedService(LinkedService):
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param access_key_id: The access key identifier of the Amazon S3 Identity
@@ -46,14 +52,16 @@
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'},
'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, additional_properties=None, connect_via=None, description=None, access_key_id=None, secret_access_key=None, encrypted_credential=None):
super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
def __init__(self, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, encrypted_credential=None):
super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.access_key_id = access_key_id
self.secret_access_key = secret_access_key
self.encrypted_credential = encrypted_credential
azure/mgmt/datafactory/models/azure_batch_linked_service.py
@@ -23,6 +23,12 @@ class AzureBatchLinkedService(LinkedService):
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param account_name: The Azure Batch account name. Type: string (or
@@ -57,6 +63,8 @@
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'account_name': {'key': 'typeProperties.accountName', 'type': 'object'},
'access_key': {'key': 'typeProperties.accessKey', 'type': 'SecretBase'},
@@ -66,8 +74,8 @@
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description=None, access_key=None, encrypted_credential=None):
super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
def __init__(self, account_name, batch_uri, pool_name, linked_service_name, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, access_key=None, encrypted_credential=None):
super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.account_name = account_name
self.access_key = access_key
self.batch_uri = batch_uri
azure/mgmt/datafactory/models/azure_blob_dataset.py
@@ -29,6 +29,9 @@ class AzureBlobDataset(Dataset):
:param parameters: Parameters for dataset.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param folder_path: The path of the Azure Blob storage. Type: string (or
@@ -57,6 +60,7 @@
'structure': {'key': 'structure', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'},
'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'},
@@ -65,8 +69,8 @@
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
}

def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, folder_path=None, table_root_location=None, file_name=None, format=None, compression=None):
super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters)
def __init__(self, linked_service_name, additional_properties=None, description=None, structure=None, parameters=None, annotations=None, folder_path=None, table_root_location=None, file_name=None, format=None, compression=None):
super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations)
self.folder_path = folder_path
self.table_root_location = table_root_location
self.file_name = file_name
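`AzureBlobDataset` picks up both `parameters` and `annotations`. The sketch below also hints at what dataset parameters are typically for; the `@dataset().folder` expression payload is an assumption about common Data Factory expression syntax, not something defined by this diff:

```python
from azure.mgmt.datafactory.models import (
    AzureBlobDataset,
    LinkedServiceReference,
    ParameterSpecification,
)

blob_dataset = AzureBlobDataset(
    linked_service_name=LinkedServiceReference(reference_name='AzureStorageLinkedService'),  # placeholder
    parameters={'folder': ParameterSpecification(type='String')},
    annotations=['regenerated 2018-02'],
    # folder_path is 'object' typed, so an expression payload can be passed as a dict (assumed form).
    folder_path={'value': '@dataset().folder', 'type': 'Expression'},
    file_name='data.csv',  # placeholder
)
```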
azure/mgmt/datafactory/models/azure_data_lake_analytics_linked_service.py
@@ -23,6 +23,12 @@ class AzureDataLakeAnalyticsLinkedService(LinkedService):
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param type: Constant filled by server.
:type type: str
:param account_name: The Azure Data Lake Analytics account name. Type:
@@ -65,6 +71,8 @@
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'account_name': {'key': 'typeProperties.accountName', 'type': 'object'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
@@ -76,8 +84,8 @@
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}

def __init__(self, account_name, tenant, additional_properties=None, connect_via=None, description=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None):
super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description)
def __init__(self, account_name, tenant, additional_properties=None, connect_via=None, description=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, subscription_id=None, resource_group_name=None, data_lake_analytics_uri=None, encrypted_credential=None):
super(AzureDataLakeAnalyticsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations)
self.account_name = account_name
self.service_principal_id = service_principal_id
self.service_principal_key = service_principal_key