diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json
index 041d591b083e..910ec049493a 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/_meta.json
+++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json
@@ -1,11 +1,11 @@
 {
-  "commit": "ed84b11847785792767b0b84cc6f98f4ea08ca77",
+  "commit": "55c40f57f0057d9d3d17d0819cdda5174bcbd26d",
   "repository_url": "https://github.com/Azure/azure-rest-api-specs",
   "autorest": "3.9.7",
   "use": [
     "@autorest/python@6.7.1",
     "@autorest/modelerfour@4.26.2"
   ],
-  "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --use=@autorest/python@6.7.1 --use=@autorest/modelerfour@4.26.2 --version=3.9.7 --version-tolerant=False",
+  "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.7.1 --use=@autorest/modelerfour@4.26.2 --version=3.9.7 --version-tolerant=False",
   "readme": "specification/datafactory/resource-manager/readme.md"
 }
\ No newline at end of file
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py
index 77f53a3589c6..c47f66669f1b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py
@@ -6,4 +6,4 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
-VERSION = "4.0.0"
+VERSION = "1.0.0"
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index 702f57a07cff..d2e400109586 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -587,8 +587,16 @@
 from ._models_py3 import SalesforceServiceCloudObjectDataset
 from ._models_py3 import SalesforceServiceCloudSink
 from ._models_py3 import SalesforceServiceCloudSource
+from ._models_py3 import SalesforceServiceCloudV2LinkedService
+from ._models_py3 import SalesforceServiceCloudV2ObjectDataset
+from ._models_py3 import SalesforceServiceCloudV2Sink
+from ._models_py3 import SalesforceServiceCloudV2Source
 from ._models_py3 import SalesforceSink
 from ._models_py3 import SalesforceSource
+from ._models_py3 import SalesforceV2LinkedService
+from ._models_py3 import SalesforceV2ObjectDataset
+from ._models_py3 import SalesforceV2Sink
+from ._models_py3 import SalesforceV2Source
 from ._models_py3 import SapBWLinkedService
 from ._models_py3 import SapBwCubeDataset
 from ._models_py3 import SapBwSource
@@ -839,6 +847,8 @@
 from ._data_factory_management_client_enums import RunQueryOrderByField
 from ._data_factory_management_client_enums import SalesforceSinkWriteBehavior
 from ._data_factory_management_client_enums import SalesforceSourceReadBehavior
+from ._data_factory_management_client_enums import SalesforceV2SinkWriteBehavior
+from ._data_factory_management_client_enums import SalesforceV2SourceReadBehavior
 from ._data_factory_management_client_enums import SapCloudForCustomerSinkWriteBehavior
 from ._data_factory_management_client_enums import SapHanaAuthenticationType
 from ._data_factory_management_client_enums import SapHanaPartitionOption
@@ -1464,8 +1474,16 @@
     "SalesforceServiceCloudObjectDataset",
     "SalesforceServiceCloudSink",
     "SalesforceServiceCloudSource",
+    "SalesforceServiceCloudV2LinkedService",
+    "SalesforceServiceCloudV2ObjectDataset",
+    "SalesforceServiceCloudV2Sink",
+    "SalesforceServiceCloudV2Source",
     "SalesforceSink",
     "SalesforceSource",
+    "SalesforceV2LinkedService",
+    "SalesforceV2ObjectDataset",
+    "SalesforceV2Sink",
+    "SalesforceV2Source",
     "SapBWLinkedService",
     "SapBwCubeDataset",
     "SapBwSource",
@@ -1715,6 +1733,8 @@
     "RunQueryOrderByField",
     "SalesforceSinkWriteBehavior",
     "SalesforceSourceReadBehavior",
+    "SalesforceV2SinkWriteBehavior",
+    "SalesforceV2SourceReadBehavior",
     "SapCloudForCustomerSinkWriteBehavior",
     "SapHanaAuthenticationType",
     "SapHanaPartitionOption",
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
index b0ad5505dd4a..70f0e4e6417e 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
@@ -721,6 +721,20 @@ class SalesforceSourceReadBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta)
     QUERY_ALL = "QueryAll"
 
 
+class SalesforceV2SinkWriteBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The write behavior for the operation. Default is Insert."""
+
+    INSERT = "Insert"
+    UPSERT = "Upsert"
+
+
+class SalesforceV2SourceReadBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The Salesforce read behavior for the operation."""
+
+    QUERY = "query"
+    QUERY_ALL = "queryAll"
+
+
 class SapCloudForCustomerSinkWriteBehavior(str, Enum, metaclass=CaseInsensitiveEnumMeta):
     """The write behavior for the operation.
Default is 'Insert'.""" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 6b8186c03b06..f864888431cf 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -494,7 +494,8 @@ class LinkedService(_serialization.Model): PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, QuickbaseLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, - SalesforceServiceCloudLinkedService, SapBWLinkedService, SapCloudForCustomerLinkedService, + SalesforceServiceCloudLinkedService, SalesforceServiceCloudV2LinkedService, + SalesforceV2LinkedService, SapBWLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOdpLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SmartsheetLinkedService, @@ -624,6 +625,8 @@ class LinkedService(_serialization.Model): "Salesforce": "SalesforceLinkedService", "SalesforceMarketingCloud": "SalesforceMarketingCloudLinkedService", "SalesforceServiceCloud": "SalesforceServiceCloudLinkedService", + "SalesforceServiceCloudV2": "SalesforceServiceCloudV2LinkedService", + "SalesforceV2": "SalesforceV2LinkedService", "SapBW": "SapBWLinkedService", "SapCloudForCustomer": "SapCloudForCustomerLinkedService", "SapEcc": "SapEccLinkedService", @@ -865,7 +868,8 @@ class Dataset(_serialization.Model): OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, - SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, + SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, + SalesforceServiceCloudV2ObjectDataset, SalesforceV2ObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOdpResourceDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, @@ -994,6 +998,8 @@ class Dataset(_serialization.Model): "SalesforceMarketingCloudObject": "SalesforceMarketingCloudObjectDataset", "SalesforceObject": "SalesforceObjectDataset", "SalesforceServiceCloudObject": "SalesforceServiceCloudObjectDataset", + "SalesforceServiceCloudV2Object": "SalesforceServiceCloudV2ObjectDataset", + "SalesforceV2Object": "SalesforceV2ObjectDataset", "SapBwCube": "SapBwCubeDataset", "SapCloudForCustomerResource": "SapCloudForCustomerResourceDataset", "SapEccResource": "SapEccResourceDataset", @@ -1178,8 +1184,8 @@ class CopySource(_serialization.Model): ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, LakeHouseTableSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, - SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, - WebSource, XmlSource + SalesforceServiceCloudSource, SalesforceServiceCloudV2Source, SharePointOnlineListSource, + 
SnowflakeSource, TabularSource, WebSource, XmlSource All required parameters must be populated in order to send to Azure. @@ -1250,6 +1256,7 @@ class CopySource(_serialization.Model): "RelationalSource": "RelationalSource", "RestSource": "RestSource", "SalesforceServiceCloudSource": "SalesforceServiceCloudSource", + "SalesforceServiceCloudV2Source": "SalesforceServiceCloudV2Source", "SharePointOnlineListSource": "SharePointOnlineListSource", "SnowflakeSource": "SnowflakeSource", "TabularSource": "TabularSource", @@ -1305,10 +1312,11 @@ class TabularSource(CopySource): HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, - SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, - SapEccSource, SapHanaSource, SapOdpSource, SapOpenHubSource, SapTableSource, ServiceNowSource, - ShopifySource, SparkSource, SqlDWSource, SqlMISource, SqlServerSource, SqlSource, SquareSource, - SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource + SalesforceMarketingCloudSource, SalesforceSource, SalesforceV2Source, SapBwSource, + SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOdpSource, SapOpenHubSource, + SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDWSource, SqlMISource, + SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, + XeroSource, ZohoSource All required parameters must be populated in order to send to Azure. @@ -1393,6 +1401,7 @@ class TabularSource(CopySource): "ResponsysSource": "ResponsysSource", "SalesforceMarketingCloudSource": "SalesforceMarketingCloudSource", "SalesforceSource": "SalesforceSource", + "SalesforceV2Source": "SalesforceV2Source", "SapBwSource": "SapBwSource", "SapCloudForCustomerSource": "SapCloudForCustomerSource", "SapEccSource": "SapEccSource", @@ -4344,8 +4353,9 @@ class CopySink(_serialization.Model): CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, LakeHouseTableSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, - OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, - SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink + OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, + SalesforceServiceCloudV2Sink, SalesforceSink, SalesforceV2Sink, SapCloudForCustomerSink, + SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink All required parameters must be populated in order to send to Azure. @@ -4424,7 +4434,9 @@ class CopySink(_serialization.Model): "ParquetSink": "ParquetSink", "RestSink": "RestSink", "SalesforceServiceCloudSink": "SalesforceServiceCloudSink", + "SalesforceServiceCloudV2Sink": "SalesforceServiceCloudV2Sink", "SalesforceSink": "SalesforceSink", + "SalesforceV2Sink": "SalesforceV2Sink", "SapCloudForCustomerSink": "SapCloudForCustomerSink", "SnowflakeSink": "SnowflakeSink", "SqlDWSink": "SqlDWSink", @@ -5889,6 +5901,9 @@ class StoreWriteSettings(_serialization.Model): :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. :vartype copy_behavior: JSON + :ivar metadata: Specify the custom metadata to be added to sink data. 
Type: array of objects + (or Expression with resultType array of objects). + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -5901,6 +5916,7 @@ class StoreWriteSettings(_serialization.Model): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, } _subtype_map = { @@ -5922,6 +5938,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, copy_behavior: Optional[JSON] = None, + metadata: Optional[List["_models.MetadataItem"]] = None, **kwargs: Any ) -> None: """ @@ -5936,6 +5953,9 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. :paramtype copy_behavior: JSON + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ super().__init__(**kwargs) self.additional_properties = additional_properties @@ -5943,6 +5963,7 @@ def __init__( self.max_concurrent_connections = max_concurrent_connections self.disable_metrics_collection = disable_metrics_collection self.copy_behavior = copy_behavior + self.metadata = metadata class AzureBlobFSWriteSettings(StoreWriteSettings): @@ -5963,6 +5984,9 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. :vartype copy_behavior: JSON + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] :ivar block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). :vartype block_size_in_mb: JSON @@ -5978,6 +6002,7 @@ class AzureBlobFSWriteSettings(StoreWriteSettings): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, "block_size_in_mb": {"key": "blockSizeInMB", "type": "object"}, } @@ -5988,6 +6013,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, copy_behavior: Optional[JSON] = None, + metadata: Optional[List["_models.MetadataItem"]] = None, block_size_in_mb: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -6003,6 +6029,9 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. :paramtype copy_behavior: JSON + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] :keyword block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). 
:paramtype block_size_in_mb: JSON @@ -6012,6 +6041,7 @@ def __init__( max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, + metadata=metadata, **kwargs ) self.type: str = "AzureBlobFSWriteSettings" @@ -6443,6 +6473,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. :vartype copy_behavior: JSON + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] :ivar block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). :vartype block_size_in_mb: JSON @@ -6458,6 +6491,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, "block_size_in_mb": {"key": "blockSizeInMB", "type": "object"}, } @@ -6468,6 +6502,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, copy_behavior: Optional[JSON] = None, + metadata: Optional[List["_models.MetadataItem"]] = None, block_size_in_mb: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -6483,6 +6518,9 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. :paramtype copy_behavior: JSON + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] :keyword block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). :paramtype block_size_in_mb: JSON @@ -6492,6 +6530,7 @@ def __init__( max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, + metadata=metadata, **kwargs ) self.type: str = "AzureBlobStorageWriteSettings" @@ -8960,6 +8999,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. :vartype copy_behavior: JSON + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] :ivar expiry_date_time: Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). 
@@ -8976,6 +9018,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, "expiry_date_time": {"key": "expiryDateTime", "type": "object"}, } @@ -8986,6 +9029,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, copy_behavior: Optional[JSON] = None, + metadata: Optional[List["_models.MetadataItem"]] = None, expiry_date_time: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -9001,6 +9045,9 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. :paramtype copy_behavior: JSON + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] :keyword expiry_date_time: Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). @@ -9011,6 +9058,7 @@ def __init__( max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, + metadata=metadata, **kwargs ) self.type: str = "AzureDataLakeStoreWriteSettings" @@ -9387,6 +9435,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. :vartype copy_behavior: JSON + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -9399,6 +9450,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, } def __init__( @@ -9408,6 +9460,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, copy_behavior: Optional[JSON] = None, + metadata: Optional[List["_models.MetadataItem"]] = None, **kwargs: Any ) -> None: """ @@ -9422,12 +9475,16 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. :paramtype copy_behavior: JSON + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ super().__init__( additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, + metadata=metadata, **kwargs ) self.type: str = "AzureFileStorageWriteSettings" @@ -9471,7 +9528,7 @@ class AzureFunctionActivity(ExecutionActivity): # pylint: disable=too-many-inst :ivar headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :vartype headers: JSON + :vartype headers: dict[str, str] :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). :vartype body: JSON @@ -9497,7 +9554,7 @@ class AzureFunctionActivity(ExecutionActivity): # pylint: disable=too-many-inst "policy": {"key": "policy", "type": "ActivityPolicy"}, "method": {"key": "typeProperties.method", "type": "str"}, "function_name": {"key": "typeProperties.functionName", "type": "object"}, - "headers": {"key": "typeProperties.headers", "type": "object"}, + "headers": {"key": "typeProperties.headers", "type": "{str}"}, "body": {"key": "typeProperties.body", "type": "object"}, } @@ -9515,7 +9572,7 @@ def __init__( user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - headers: Optional[JSON] = None, + headers: Optional[Dict[str, str]] = None, body: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -9551,7 +9608,7 @@ def __init__( :keyword headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :paramtype headers: JSON + :paramtype headers: dict[str, str] :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). :paramtype body: JSON @@ -9594,7 +9651,8 @@ class AzureFunctionLinkedService(LinkedService): # pylint: disable=too-many-ins :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] :ivar function_app_url: The endpoint of the Azure Function App. URL will be in the format - https://:code:``.azurewebsites.net. Required. + https://:code:``.azurewebsites.net. Type: string (or Expression with resultType + string). Required. :vartype function_app_url: JSON :ivar function_key: Function or Host key for Azure Function App. :vartype function_key: ~azure.mgmt.datafactory.models.SecretBase @@ -9603,7 +9661,8 @@ class AzureFunctionLinkedService(LinkedService): # pylint: disable=too-many-ins :vartype encrypted_credential: str :ivar credential: The credential reference containing authentication information. :vartype credential: ~azure.mgmt.datafactory.models.CredentialReference - :ivar resource_id: Allowed token audiences for azure function. + :ivar resource_id: Allowed token audiences for azure function. Type: string (or Expression with + resultType string). 
:vartype resource_id: JSON :ivar authentication: Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression with resultType string). @@ -9659,7 +9718,8 @@ def __init__( :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] :keyword function_app_url: The endpoint of the Azure Function App. URL will be in the format - https://:code:``.azurewebsites.net. Required. + https://:code:``.azurewebsites.net. Type: string (or Expression with resultType + string). Required. :paramtype function_app_url: JSON :keyword function_key: Function or Host key for Azure Function App. :paramtype function_key: ~azure.mgmt.datafactory.models.SecretBase @@ -9668,7 +9728,8 @@ def __init__( :paramtype encrypted_credential: str :keyword credential: The credential reference containing authentication information. :paramtype credential: ~azure.mgmt.datafactory.models.CredentialReference - :keyword resource_id: Allowed token audiences for azure function. + :keyword resource_id: Allowed token audiences for azure function. Type: string (or Expression + with resultType string). :paramtype resource_id: JSON :keyword authentication: Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression with resultType string). @@ -10343,7 +10404,7 @@ class AzureMLExecutePipelineActivity(ExecutionActivity): # pylint: disable=too- :vartype ml_pipeline_parameters: JSON :ivar data_path_assignments: Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline - execution request. Type: object with key value pairs (or Expression with resultType object). + execution request. Type: object (or Expression with resultType object). :vartype data_path_assignments: JSON :ivar ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string @@ -10446,7 +10507,7 @@ def __init__( :paramtype ml_pipeline_parameters: JSON :keyword data_path_assignments: Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline - execution request. Type: object with key value pairs (or Expression with resultType object). + execution request. Type: object (or Expression with resultType object). :paramtype data_path_assignments: JSON :keyword ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string @@ -13064,7 +13125,8 @@ class AzureSqlSource(TabularSource): # pylint: disable=too-many-instance-attrib :ivar produce_additional_types: Which additional types to produce. :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. 
:vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -13150,7 +13212,8 @@ def __init__( :keyword produce_additional_types: Which additional types to produce. :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -15679,9 +15742,9 @@ class CmdkeySetup(CustomSetupBase): :ivar type: The type of custom setup. Required. :vartype type: str - :ivar target_name: The server name of data source access. Required. + :ivar target_name: The server name of data source access. Type: string. Required. :vartype target_name: JSON - :ivar user_name: The user name of data source access. Required. + :ivar user_name: The user name of data source access. Type: string. Required. :vartype user_name: JSON :ivar password: The password of data source access. Required. :vartype password: ~azure.mgmt.datafactory.models.SecretBase @@ -15703,9 +15766,9 @@ class CmdkeySetup(CustomSetupBase): def __init__(self, *, target_name: JSON, user_name: JSON, password: "_models.SecretBase", **kwargs: Any) -> None: """ - :keyword target_name: The server name of data source access. Required. + :keyword target_name: The server name of data source access. Type: string. Required. :paramtype target_name: JSON - :keyword user_name: The user name of data source access. Required. + :keyword user_name: The user name of data source access. Type: string. Required. :paramtype user_name: JSON :keyword password: The password of data source access. Required. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase @@ -27071,6 +27134,9 @@ class FileServerWriteSettings(StoreWriteSettings): :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. :vartype copy_behavior: JSON + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -27083,6 +27149,7 @@ class FileServerWriteSettings(StoreWriteSettings): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, } def __init__( @@ -27092,6 +27159,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, copy_behavior: Optional[JSON] = None, + metadata: Optional[List["_models.MetadataItem"]] = None, **kwargs: Any ) -> None: """ @@ -27106,12 +27174,16 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. :paramtype copy_behavior: JSON + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ super().__init__( additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, + metadata=metadata, **kwargs ) self.type: str = "FileServerWriteSettings" @@ -31334,8 +31406,8 @@ class HDInsightOnDemandLinkedService(LinkedService): # pylint: disable=too-many :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] - :ivar cluster_size: Number of worker/data nodes in the cluster. Suggestion value: 4. Type: - string (or Expression with resultType string). Required. + :ivar cluster_size: Number of worker/data nodes in the cluster. Suggestion value: 4. Type: int + (or Expression with resultType int). Required. :vartype cluster_size: JSON :ivar time_to_live: The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there @@ -31552,7 +31624,7 @@ def __init__( # pylint: disable=too-many-locals :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] :keyword cluster_size: Number of worker/data nodes in the cluster. Suggestion value: 4. Type: - string (or Expression with resultType string). Required. + int (or Expression with resultType int). Required. :paramtype cluster_size: JSON :keyword time_to_live: The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there @@ -32836,7 +32908,7 @@ class HttpLinkedService(LinkedService): # pylint: disable=too-many-instance-att authentication. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for - authorization. Type: object (or Expression with resultType object). + authorization. Type: key value pairs (value should be string type). :vartype auth_headers: JSON :ivar embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or @@ -32926,7 +32998,7 @@ def __init__( EmbeddedCertData authentication. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for - authorization. Type: object (or Expression with resultType object). + authorization. Type: key value pairs (value should be string type). :paramtype auth_headers: JSON :keyword embedded_cert_data: Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either @@ -37272,6 +37344,9 @@ class LakeHouseWriteSettings(StoreWriteSettings): :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. :vartype copy_behavior: JSON + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ _validation = { @@ -37284,6 +37359,7 @@ class LakeHouseWriteSettings(StoreWriteSettings): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, } def __init__( @@ -37293,6 +37369,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, copy_behavior: Optional[JSON] = None, + metadata: Optional[List["_models.MetadataItem"]] = None, **kwargs: Any ) -> None: """ @@ -37307,12 +37384,16 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. :paramtype copy_behavior: JSON + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] """ super().__init__( additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, + metadata=metadata, **kwargs ) self.type: str = "LakeHouseWriteSettings" @@ -39516,7 +39597,7 @@ def __init__( self.script_lines = script_lines -class MariaDBLinkedService(LinkedService): +class MariaDBLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """MariaDB server linked service. All required parameters must be populated in order to send to Azure. @@ -39534,11 +39615,23 @@ class MariaDBLinkedService(LinkedService): :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] + :ivar driver_version: The version of the MariaDB driver. Type: string. V1 or empty for legacy + driver, V2 for new driver. V1 can support connection string and property bag, V2 can only + support connection string. + :vartype driver_version: JSON :ivar connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :vartype connection_string: JSON - :ivar pwd: The Azure key vault secret reference of password in connection string. - :vartype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :ivar server: Server name for connection. Type: string. + :vartype server: JSON + :ivar port: The port for the connection. Type: integer. + :vartype port: JSON + :ivar username: Username for authentication. Type: string. + :vartype username: JSON + :ivar database: Database name for connection. Type: string. + :vartype database: JSON + :ivar password: The Azure key vault secret reference of password in connection string. + :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
:vartype encrypted_credential: str @@ -39555,8 +39648,13 @@ class MariaDBLinkedService(LinkedService): "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, + "driver_version": {"key": "typeProperties.driverVersion", "type": "object"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, - "pwd": {"key": "typeProperties.pwd", "type": "AzureKeyVaultSecretReference"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, + "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } @@ -39568,8 +39666,13 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, + driver_version: Optional[JSON] = None, connection_string: Optional[JSON] = None, - pwd: Optional["_models.AzureKeyVaultSecretReference"] = None, + server: Optional[JSON] = None, + port: Optional[JSON] = None, + username: Optional[JSON] = None, + database: Optional[JSON] = None, + password: Optional["_models.AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, **kwargs: Any ) -> None: @@ -39585,11 +39688,23 @@ def __init__( :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] + :keyword driver_version: The version of the MariaDB driver. Type: string. V1 or empty for + legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can + only support connection string. + :paramtype driver_version: JSON :keyword connection_string: An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :paramtype connection_string: JSON - :keyword pwd: The Azure key vault secret reference of password in connection string. - :paramtype pwd: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :keyword server: Server name for connection. Type: string. + :paramtype server: JSON + :keyword port: The port for the connection. Type: integer. + :paramtype port: JSON + :keyword username: Username for authentication. Type: string. + :paramtype username: JSON + :keyword database: Database name for connection. Type: string. + :paramtype database: JSON + :keyword password: The Azure key vault secret reference of password in connection string. + :paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. 
:paramtype encrypted_credential: str @@ -39603,8 +39718,13 @@ def __init__( **kwargs ) self.type: str = "MariaDB" + self.driver_version = driver_version self.connection_string = connection_string - self.pwd = pwd + self.server = server + self.port = port + self.username = username + self.database = database + self.password = password self.encrypted_credential = encrypted_credential @@ -41871,7 +41991,7 @@ def __init__( self.additional_columns = additional_columns -class MySqlLinkedService(LinkedService): +class MySqlLinkedService(LinkedService): # pylint: disable=too-many-instance-attributes """Linked service for MySQL data source. All required parameters must be populated in order to send to Azure. @@ -41889,9 +42009,27 @@ class MySqlLinkedService(LinkedService): :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :ivar annotations: List of tags that can be used for describing the linked service. :vartype annotations: list[JSON] + :ivar driver_version: The version of the MySQL driver. Type: string. V1 or empty for legacy + driver, V2 for new driver. V1 can support connection string and property bag, V2 can only + support connection string. + :vartype driver_version: JSON :ivar connection_string: The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. Required. + AzureKeyVaultSecretReference. :vartype connection_string: JSON + :ivar server: Server name for connection. Type: string. + :vartype server: JSON + :ivar port: The port for the connection. Type: integer. + :vartype port: JSON + :ivar username: Username for authentication. Type: string. + :vartype username: JSON + :ivar database: Database name for connection. Type: string. + :vartype database: JSON + :ivar ssl_mode: SSL mode for connection. Type: integer. 0: disable, 1: prefer, 2: require, 3: + verify-ca, 4: verify-full. + :vartype ssl_mode: JSON + :ivar use_system_trust_store: Use system trust store for connection. Type: integer. 0: enable, + 1: disable. + :vartype use_system_trust_store: JSON :ivar password: The Azure key vault secret reference of password in connection string. :vartype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :ivar encrypted_credential: The encrypted credential used for authentication. 
Credentials are @@ -41901,7 +42039,6 @@ class MySqlLinkedService(LinkedService): _validation = { "type": {"required": True}, - "connection_string": {"required": True}, } _attribute_map = { @@ -41911,7 +42048,14 @@ class MySqlLinkedService(LinkedService): "description": {"key": "description", "type": "str"}, "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, "annotations": {"key": "annotations", "type": "[object]"}, + "driver_version": {"key": "typeProperties.driverVersion", "type": "object"}, "connection_string": {"key": "typeProperties.connectionString", "type": "object"}, + "server": {"key": "typeProperties.server", "type": "object"}, + "port": {"key": "typeProperties.port", "type": "object"}, + "username": {"key": "typeProperties.username", "type": "object"}, + "database": {"key": "typeProperties.database", "type": "object"}, + "ssl_mode": {"key": "typeProperties.sslMode", "type": "object"}, + "use_system_trust_store": {"key": "typeProperties.useSystemTrustStore", "type": "object"}, "password": {"key": "typeProperties.password", "type": "AzureKeyVaultSecretReference"}, "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, } @@ -41919,12 +42063,19 @@ class MySqlLinkedService(LinkedService): def __init__( self, *, - connection_string: JSON, additional_properties: Optional[Dict[str, JSON]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, annotations: Optional[List[JSON]] = None, + driver_version: Optional[JSON] = None, + connection_string: Optional[JSON] = None, + server: Optional[JSON] = None, + port: Optional[JSON] = None, + username: Optional[JSON] = None, + database: Optional[JSON] = None, + ssl_mode: Optional[JSON] = None, + use_system_trust_store: Optional[JSON] = None, password: Optional["_models.AzureKeyVaultSecretReference"] = None, encrypted_credential: Optional[str] = None, **kwargs: Any @@ -41941,9 +42092,27 @@ def __init__( :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :keyword annotations: List of tags that can be used for describing the linked service. :paramtype annotations: list[JSON] + :keyword driver_version: The version of the MySQL driver. Type: string. V1 or empty for legacy + driver, V2 for new driver. V1 can support connection string and property bag, V2 can only + support connection string. + :paramtype driver_version: JSON :keyword connection_string: The connection string. Type: string, SecureString or - AzureKeyVaultSecretReference. Required. + AzureKeyVaultSecretReference. :paramtype connection_string: JSON + :keyword server: Server name for connection. Type: string. + :paramtype server: JSON + :keyword port: The port for the connection. Type: integer. + :paramtype port: JSON + :keyword username: Username for authentication. Type: string. + :paramtype username: JSON + :keyword database: Database name for connection. Type: string. + :paramtype database: JSON + :keyword ssl_mode: SSL mode for connection. Type: integer. 0: disable, 1: prefer, 2: require, + 3: verify-ca, 4: verify-full. + :paramtype ssl_mode: JSON + :keyword use_system_trust_store: Use system trust store for connection. Type: integer. 0: + enable, 1: disable. + :paramtype use_system_trust_store: JSON :keyword password: The Azure key vault secret reference of password in connection string. 
:paramtype password: ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference :keyword encrypted_credential: The encrypted credential used for authentication. Credentials @@ -41959,7 +42128,14 @@ def __init__( **kwargs ) self.type: str = "MySql" + self.driver_version = driver_version self.connection_string = connection_string + self.server = server + self.port = port + self.username = username + self.database = database + self.ssl_mode = ssl_mode + self.use_system_trust_store = use_system_trust_store self.password = password self.encrypted_credential = encrypted_credential @@ -42611,7 +42787,7 @@ class ODataLinkedService(LinkedService): # pylint: disable=too-many-instance-at :ivar password: Password of the OData service. :vartype password: ~azure.mgmt.datafactory.models.SecretBase :ivar auth_headers: The additional HTTP headers in the request to RESTful API used for - authorization. Type: object (or Expression with resultType object). + authorization. Type: key value pairs (value should be string type). :vartype auth_headers: JSON :ivar tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). @@ -42731,7 +42907,7 @@ def __init__( :keyword password: Password of the OData service. :paramtype password: ~azure.mgmt.datafactory.models.SecretBase :keyword auth_headers: The additional HTTP headers in the request to RESTful API used for - authorization. Type: object (or Expression with resultType object). + authorization. Type: key value pairs (value should be string type). :paramtype auth_headers: JSON :keyword tenant: Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). @@ -50553,7 +50729,7 @@ class RestSink(CopySink): # pylint: disable=too-many-instance-attributes string (or Expression with resultType string). :vartype request_method: JSON :ivar additional_headers: The additional HTTP headers in the request to the RESTful API. Type: - string (or Expression with resultType string). + key value pairs (value should be string type). :vartype additional_headers: JSON :ivar http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string @@ -50563,7 +50739,8 @@ class RestSink(CopySink): # pylint: disable=too-many-instance-attributes :ivar request_interval: The time to await before sending next request, in milliseconds. :vartype request_interval: JSON :ivar http_compression_type: Http Compression Type to Send data in compressed format with - Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. Type: string + (or Expression with resultType string). :vartype http_compression_type: JSON """ @@ -50630,7 +50807,7 @@ def __init__( Type: string (or Expression with resultType string). :paramtype request_method: JSON :keyword additional_headers: The additional HTTP headers in the request to the RESTful API. - Type: string (or Expression with resultType string). + Type: key value pairs (value should be string type). :paramtype additional_headers: JSON :keyword http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. 
@@ -50640,7 +50817,8 @@ def __init__( :keyword request_interval: The time to await before sending next request, in milliseconds. :paramtype request_interval: JSON :keyword http_compression_type: Http Compression Type to Send data in compressed format with - Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. Type: string + (or Expression with resultType string). :paramtype http_compression_type: JSON """ super().__init__( @@ -50703,7 +50881,7 @@ class RestSource(CopySource): # pylint: disable=too-many-instance-attributes :ivar request_interval: The time to await before sending next page request. :vartype request_interval: JSON :ivar additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). + key value pairs (value should be string type). :vartype additional_columns: JSON """ @@ -50780,7 +50958,7 @@ def __init__( :keyword request_interval: The time to await before sending next page request. :paramtype request_interval: JSON :keyword additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). + key value pairs (value should be string type). :paramtype additional_columns: JSON """ super().__init__( @@ -51929,14 +52107,699 @@ def __init__( disable_metrics_collection=disable_metrics_collection, **kwargs ) - self.type: str = "SalesforceServiceCloudSink" + self.type: str = "SalesforceServiceCloudSink" + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + + +class SalesforceServiceCloudSource(CopySource): + """A copy activity Salesforce Service Cloud source. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :vartype source_retry_count: JSON + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype source_retry_wait: JSON + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :vartype max_concurrent_connections: JSON + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :vartype disable_metrics_collection: JSON + :ivar query: Database query. Type: string (or Expression with resultType string). + :vartype query: JSON + :ivar read_behavior: The read behavior for the operation. Default is Query. Allowed values: + Query/QueryAll. Type: string (or Expression with resultType string). + :vartype read_behavior: JSON + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :vartype additional_columns: JSON + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query": {"key": "query", "type": "object"}, + "read_behavior": {"key": "readBehavior", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + query: Optional[JSON] = None, + read_behavior: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: JSON + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: JSON + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: JSON + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: JSON + :keyword query: Database query. Type: string (or Expression with resultType string). + :paramtype query: JSON + :keyword read_behavior: The read behavior for the operation. Default is Query. Allowed values: + Query/QueryAll. Type: string (or Expression with resultType string). + :paramtype read_behavior: JSON + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type: str = "SalesforceServiceCloudSource" + self.query = query + self.read_behavior = read_behavior + self.additional_columns = additional_columns + + +class SalesforceServiceCloudV2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes + """Linked service for Salesforce Service Cloud V2. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. 
+ :vartype type: str + :ivar connect_via: The integration runtime reference. + :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[JSON] + :ivar environment_url: The URL of Salesforce Service Cloud instance. For example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :vartype environment_url: JSON + :ivar client_id: The client Id for OAuth 2.0 Client Credentials Flow authentication of the + Salesforce instance. Type: string (or Expression with resultType string). + :vartype client_id: JSON + :ivar client_secret: The client secret for OAuth 2.0 Client Credentials Flow authentication of + the Salesforce instance. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar api_version: The Salesforce API version used in ADF. The version must be larger than or + equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with + resultType string). + :vartype api_version: JSON + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "environment_url": {"key": "typeProperties.environmentUrl", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "api_version": {"key": "typeProperties.apiVersion", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + connect_via: Optional["_models.IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, + annotations: Optional[List[JSON]] = None, + environment_url: Optional[JSON] = None, + client_id: Optional[JSON] = None, + client_secret: Optional["_models.SecretBase"] = None, + api_version: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. 
+ :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. + :paramtype annotations: list[JSON] + :keyword environment_url: The URL of Salesforce Service Cloud instance. For example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :paramtype environment_url: JSON + :keyword client_id: The client Id for OAuth 2.0 Client Credentials Flow authentication of the + Salesforce instance. Type: string (or Expression with resultType string). + :paramtype client_id: JSON + :keyword client_secret: The client secret for OAuth 2.0 Client Credentials Flow authentication + of the Salesforce instance. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword api_version: The Salesforce API version used in ADF. The version must be larger than + or equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with + resultType string). + :paramtype api_version: JSON + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type: str = "SalesforceServiceCloudV2" + self.environment_url = environment_url + self.client_id = client_id + self.client_secret = client_secret + self.api_version = api_version + self.encrypted_credential = encrypted_credential + + +class SalesforceServiceCloudV2ObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes + """The Salesforce Service Cloud V2 object dataset. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :vartype structure: JSON + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[JSON] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar object_api_name: The Salesforce Service Cloud V2 object API name. Type: string (or + Expression with resultType string). + :vartype object_api_name: JSON + :ivar report_id: The Salesforce Service Cloud V2 reportId. Type: string (or Expression with + resultType string). 
+ :vartype report_id: JSON + """ + + _validation = { + "type": {"required": True}, + "linked_service_name": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "object_api_name": {"key": "typeProperties.objectApiName", "type": "object"}, + "report_id": {"key": "typeProperties.reportId", "type": "object"}, + } + + def __init__( + self, + *, + linked_service_name: "_models.LinkedServiceReference", + additional_properties: Optional[Dict[str, JSON]] = None, + description: Optional[str] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, + parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, + annotations: Optional[List[JSON]] = None, + folder: Optional["_models.DatasetFolder"] = None, + object_api_name: Optional[JSON] = None, + report_id: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: JSON + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[JSON] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword object_api_name: The Salesforce Service Cloud V2 object API name. Type: string (or + Expression with resultType string). + :paramtype object_api_name: JSON + :keyword report_id: The Salesforce Service Cloud V2 reportId. Type: string (or Expression with + resultType string). + :paramtype report_id: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type: str = "SalesforceServiceCloudV2Object" + self.object_api_name = object_api_name + self.report_id = report_id + + +class SalesforceServiceCloudV2Sink(CopySink): # pylint: disable=too-many-instance-attributes + """A copy activity Salesforce Service Cloud V2 sink. + + All required parameters must be populated in order to send to Azure. 
+ + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. + :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :vartype write_batch_size: JSON + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype write_batch_timeout: JSON + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :vartype sink_retry_count: JSON + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype sink_retry_wait: JSON + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :vartype max_concurrent_connections: JSON + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :vartype disable_metrics_collection: JSON + :ivar write_behavior: The write behavior for the operation. Default is Insert. Known values + are: "Insert" and "Upsert". + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceV2SinkWriteBehavior + :ivar external_id_field_name: The name of the external ID field for upsert operation. Default + value is 'Id' column. Type: string (or Expression with resultType string). + :vartype external_id_field_name: JSON + :ivar ignore_null_values: The flag indicating whether or not to ignore null values from input + dataset (except key fields) during write operation. Default value is false. If set it to true, + it means ADF will leave the data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, versus ADF will update + the data in the destination object to NULL when doing upsert/update operation and insert NULL + value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
+ :vartype ignore_null_values: JSON + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "str"}, + "external_id_field_name": {"key": "externalIdFieldName", "type": "object"}, + "ignore_null_values": {"key": "ignoreNullValues", "type": "object"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + write_behavior: Optional[Union[str, "_models.SalesforceV2SinkWriteBehavior"]] = None, + external_id_field_name: Optional[JSON] = None, + ignore_null_values: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: JSON + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: JSON + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: JSON + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: JSON + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: JSON + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: JSON + :keyword write_behavior: The write behavior for the operation. Default is Insert. Known values + are: "Insert" and "Upsert". + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceV2SinkWriteBehavior + :keyword external_id_field_name: The name of the external ID field for upsert operation. + Default value is 'Id' column. Type: string (or Expression with resultType string). + :paramtype external_id_field_name: JSON + :keyword ignore_null_values: The flag indicating whether or not to ignore null values from + input dataset (except key fields) during write operation. Default value is false. 
If set it to + true, it means ADF will leave the data in the destination object unchanged when doing + upsert/update operation and insert defined default value when doing insert operation, versus + ADF will update the data in the destination object to NULL when doing upsert/update operation + and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType + boolean). + :paramtype ignore_null_values: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type: str = "SalesforceServiceCloudV2Sink" + self.write_behavior = write_behavior + self.external_id_field_name = external_id_field_name + self.ignore_null_values = ignore_null_values + + +class SalesforceServiceCloudV2Source(CopySource): + """A copy activity Salesforce Service Cloud V2 source. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy source type. Required. + :vartype type: str + :ivar source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :vartype source_retry_count: JSON + :ivar source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype source_retry_wait: JSON + :ivar max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :vartype max_concurrent_connections: JSON + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :vartype disable_metrics_collection: JSON + :ivar soql_query: Database query. Type: string (or Expression with resultType string). + :vartype soql_query: JSON + :ivar read_behavior: The read behavior for the operation. Default is query. Allowed values: + query/queryAll. Type: string (or Expression with resultType string). + :vartype read_behavior: JSON + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ :vartype additional_columns: JSON + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "source_retry_count": {"key": "sourceRetryCount", "type": "object"}, + "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "soql_query": {"key": "SOQLQuery", "type": "object"}, + "read_behavior": {"key": "readBehavior", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + source_retry_count: Optional[JSON] = None, + source_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + soql_query: Optional[JSON] = None, + read_behavior: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :paramtype source_retry_count: JSON + :keyword source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype source_retry_wait: JSON + :keyword max_concurrent_connections: The maximum concurrent connection count for the source + data store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: JSON + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: JSON + :keyword soql_query: Database query. Type: string (or Expression with resultType string). + :paramtype soql_query: JSON + :keyword read_behavior: The read behavior for the operation. Default is query. Allowed values: + query/queryAll. Type: string (or Expression with resultType string). + :paramtype read_behavior: JSON + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: JSON + """ + super().__init__( + additional_properties=additional_properties, + source_retry_count=source_retry_count, + source_retry_wait=source_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type: str = "SalesforceServiceCloudV2Source" + self.soql_query = soql_query + self.read_behavior = read_behavior + self.additional_columns = additional_columns + + +class SalesforceSink(CopySink): # pylint: disable=too-many-instance-attributes + """A copy activity Salesforce sink. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Copy sink type. Required. 
+ :vartype type: str + :ivar write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :vartype write_batch_size: JSON + :ivar write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype write_batch_timeout: JSON + :ivar sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :vartype sink_retry_count: JSON + :ivar sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype sink_retry_wait: JSON + :ivar max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :vartype max_concurrent_connections: JSON + :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :vartype disable_metrics_collection: JSON + :ivar write_behavior: The write behavior for the operation. Default is Insert. Known values + are: "Insert" and "Upsert". + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :ivar external_id_field_name: The name of the external ID field for upsert operation. Default + value is 'Id' column. Type: string (or Expression with resultType string). + :vartype external_id_field_name: JSON + :ivar ignore_null_values: The flag indicating whether or not to ignore null values from input + dataset (except key fields) during write operation. Default value is false. If set it to true, + it means ADF will leave the data in the destination object unchanged when doing upsert/update + operation and insert defined default value when doing insert operation, versus ADF will update + the data in the destination object to NULL when doing upsert/update operation and insert NULL + value when doing insert operation. Type: boolean (or Expression with resultType boolean). 
+ :vartype ignore_null_values: JSON + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "write_batch_size": {"key": "writeBatchSize", "type": "object"}, + "write_batch_timeout": {"key": "writeBatchTimeout", "type": "object"}, + "sink_retry_count": {"key": "sinkRetryCount", "type": "object"}, + "sink_retry_wait": {"key": "sinkRetryWait", "type": "object"}, + "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, + "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "write_behavior": {"key": "writeBehavior", "type": "str"}, + "external_id_field_name": {"key": "externalIdFieldName", "type": "object"}, + "ignore_null_values": {"key": "ignoreNullValues", "type": "object"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + write_batch_size: Optional[JSON] = None, + write_batch_timeout: Optional[JSON] = None, + sink_retry_count: Optional[JSON] = None, + sink_retry_wait: Optional[JSON] = None, + max_concurrent_connections: Optional[JSON] = None, + disable_metrics_collection: Optional[JSON] = None, + write_behavior: Optional[Union[str, "_models.SalesforceSinkWriteBehavior"]] = None, + external_id_field_name: Optional[JSON] = None, + ignore_null_values: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :paramtype write_batch_size: JSON + :keyword write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype write_batch_timeout: JSON + :keyword sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :paramtype sink_retry_count: JSON + :keyword sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype sink_retry_wait: JSON + :keyword max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :paramtype max_concurrent_connections: JSON + :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :paramtype disable_metrics_collection: JSON + :keyword write_behavior: The write behavior for the operation. Default is Insert. Known values + are: "Insert" and "Upsert". + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :keyword external_id_field_name: The name of the external ID field for upsert operation. + Default value is 'Id' column. Type: string (or Expression with resultType string). + :paramtype external_id_field_name: JSON + :keyword ignore_null_values: The flag indicating whether or not to ignore null values from + input dataset (except key fields) during write operation. Default value is false. 
If set it to + true, it means ADF will leave the data in the destination object unchanged when doing + upsert/update operation and insert defined default value when doing insert operation, versus + ADF will update the data in the destination object to NULL when doing upsert/update operation + and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType + boolean). + :paramtype ignore_null_values: JSON + """ + super().__init__( + additional_properties=additional_properties, + write_batch_size=write_batch_size, + write_batch_timeout=write_batch_timeout, + sink_retry_count=sink_retry_count, + sink_retry_wait=sink_retry_wait, + max_concurrent_connections=max_concurrent_connections, + disable_metrics_collection=disable_metrics_collection, + **kwargs + ) + self.type: str = "SalesforceSink" self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values -class SalesforceServiceCloudSource(CopySource): - """A copy activity Salesforce Service Cloud source. +class SalesforceSource(TabularSource): + """A copy activity Salesforce source. All required parameters must be populated in order to send to Azure. @@ -51957,14 +52820,17 @@ class SalesforceServiceCloudSource(CopySource): :ivar disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). :vartype disable_metrics_collection: JSON + :ivar query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :vartype query_timeout: JSON + :ivar additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :vartype additional_columns: JSON :ivar query: Database query. Type: string (or Expression with resultType string). :vartype query: JSON :ivar read_behavior: The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with resultType string). :vartype read_behavior: JSON - :ivar additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
- :vartype additional_columns: JSON """ _validation = { @@ -51978,9 +52844,10 @@ class SalesforceServiceCloudSource(CopySource): "source_retry_wait": {"key": "sourceRetryWait", "type": "object"}, "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, + "query_timeout": {"key": "queryTimeout", "type": "object"}, + "additional_columns": {"key": "additionalColumns", "type": "object"}, "query": {"key": "query", "type": "object"}, "read_behavior": {"key": "readBehavior", "type": "object"}, - "additional_columns": {"key": "additionalColumns", "type": "object"}, } def __init__( @@ -51991,9 +52858,10 @@ def __init__( source_retry_wait: Optional[JSON] = None, max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, + query_timeout: Optional[JSON] = None, + additional_columns: Optional[JSON] = None, query: Optional[JSON] = None, read_behavior: Optional[JSON] = None, - additional_columns: Optional[JSON] = None, **kwargs: Any ) -> None: """ @@ -52012,14 +52880,17 @@ def __init__( :keyword disable_metrics_collection: If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). :paramtype disable_metrics_collection: JSON + :keyword query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :paramtype query_timeout: JSON + :keyword additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects(AdditionalColumns) (or Expression with resultType array of objects). + :paramtype additional_columns: JSON :keyword query: Database query. Type: string (or Expression with resultType string). :paramtype query: JSON :keyword read_behavior: The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with resultType string). :paramtype read_behavior: JSON - :keyword additional_columns: Specifies the additional columns to be added to source data. Type: - array of objects(AdditionalColumns) (or Expression with resultType array of objects). - :paramtype additional_columns: JSON """ super().__init__( additional_properties=additional_properties, @@ -52027,16 +52898,244 @@ def __init__( source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, + query_timeout=query_timeout, + additional_columns=additional_columns, **kwargs ) - self.type: str = "SalesforceServiceCloudSource" + self.type: str = "SalesforceSource" self.query = query self.read_behavior = read_behavior - self.additional_columns = additional_columns -class SalesforceSink(CopySink): # pylint: disable=too-many-instance-attributes - """A copy activity Salesforce sink. +class SalesforceV2LinkedService(LinkedService): # pylint: disable=too-many-instance-attributes + """Linked service for Salesforce V2. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of linked service. Required. + :vartype type: str + :ivar connect_via: The integration runtime reference. 
+ :vartype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :ivar description: Linked service description. + :vartype description: str + :ivar parameters: Parameters for linked service. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the linked service. + :vartype annotations: list[JSON] + :ivar environment_url: The URL of Salesforce instance. For example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :vartype environment_url: JSON + :ivar client_id: The client Id for OAuth 2.0 Client Credentials Flow authentication of the + Salesforce instance. Type: string (or Expression with resultType string). + :vartype client_id: JSON + :ivar client_secret: The client secret for OAuth 2.0 Client Credentials Flow authentication of + the Salesforce instance. + :vartype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :ivar api_version: The Salesforce API version used in ADF. The version must be larger than or + equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with + resultType string). + :vartype api_version: JSON + :ivar encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string. + :vartype encrypted_credential: str + """ + + _validation = { + "type": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "connect_via": {"key": "connectVia", "type": "IntegrationRuntimeReference"}, + "description": {"key": "description", "type": "str"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "environment_url": {"key": "typeProperties.environmentUrl", "type": "object"}, + "client_id": {"key": "typeProperties.clientId", "type": "object"}, + "client_secret": {"key": "typeProperties.clientSecret", "type": "SecretBase"}, + "api_version": {"key": "typeProperties.apiVersion", "type": "object"}, + "encrypted_credential": {"key": "typeProperties.encryptedCredential", "type": "str"}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, JSON]] = None, + connect_via: Optional["_models.IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, + annotations: Optional[List[JSON]] = None, + environment_url: Optional[JSON] = None, + client_id: Optional[JSON] = None, + client_secret: Optional["_models.SecretBase"] = None, + api_version: Optional[JSON] = None, + encrypted_credential: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword connect_via: The integration runtime reference. + :paramtype connect_via: ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :keyword description: Linked service description. + :paramtype description: str + :keyword parameters: Parameters for linked service. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the linked service. 
+ :paramtype annotations: list[JSON] + :keyword environment_url: The URL of Salesforce instance. For example, + 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + :paramtype environment_url: JSON + :keyword client_id: The client Id for OAuth 2.0 Client Credentials Flow authentication of the + Salesforce instance. Type: string (or Expression with resultType string). + :paramtype client_id: JSON + :keyword client_secret: The client secret for OAuth 2.0 Client Credentials Flow authentication + of the Salesforce instance. + :paramtype client_secret: ~azure.mgmt.datafactory.models.SecretBase + :keyword api_version: The Salesforce API version used in ADF. The version must be larger than + or equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with + resultType string). + :paramtype api_version: JSON + :keyword encrypted_credential: The encrypted credential used for authentication. Credentials + are encrypted using the integration runtime credential manager. Type: string. + :paramtype encrypted_credential: str + """ + super().__init__( + additional_properties=additional_properties, + connect_via=connect_via, + description=description, + parameters=parameters, + annotations=annotations, + **kwargs + ) + self.type: str = "SalesforceV2" + self.environment_url = environment_url + self.client_id = client_id + self.client_secret = client_secret + self.api_version = api_version + self.encrypted_credential = encrypted_credential + + +class SalesforceV2ObjectDataset(Dataset): # pylint: disable=too-many-instance-attributes + """The Salesforce V2 object dataset. + + All required parameters must be populated in order to send to Azure. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, JSON] + :ivar type: Type of dataset. Required. + :vartype type: str + :ivar description: Dataset description. + :vartype description: str + :ivar structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :vartype structure: JSON + :ivar schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :vartype schema: JSON + :ivar linked_service_name: Linked service reference. Required. + :vartype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :ivar parameters: Parameters for dataset. + :vartype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :ivar annotations: List of tags that can be used for describing the Dataset. + :vartype annotations: list[JSON] + :ivar folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :vartype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :ivar object_api_name: The Salesforce V2 object API name. Type: string (or Expression with + resultType string). + :vartype object_api_name: JSON + :ivar report_id: The Salesforce V2 report Id. Type: string (or Expression with resultType + string). 
+ :vartype report_id: JSON + """ + + _validation = { + "type": {"required": True}, + "linked_service_name": {"required": True}, + } + + _attribute_map = { + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "structure": {"key": "structure", "type": "object"}, + "schema": {"key": "schema", "type": "object"}, + "linked_service_name": {"key": "linkedServiceName", "type": "LinkedServiceReference"}, + "parameters": {"key": "parameters", "type": "{ParameterSpecification}"}, + "annotations": {"key": "annotations", "type": "[object]"}, + "folder": {"key": "folder", "type": "DatasetFolder"}, + "object_api_name": {"key": "typeProperties.objectApiName", "type": "object"}, + "report_id": {"key": "typeProperties.reportId", "type": "object"}, + } + + def __init__( + self, + *, + linked_service_name: "_models.LinkedServiceReference", + additional_properties: Optional[Dict[str, JSON]] = None, + description: Optional[str] = None, + structure: Optional[JSON] = None, + schema: Optional[JSON] = None, + parameters: Optional[Dict[str, "_models.ParameterSpecification"]] = None, + annotations: Optional[List[JSON]] = None, + folder: Optional["_models.DatasetFolder"] = None, + object_api_name: Optional[JSON] = None, + report_id: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, JSON] + :keyword description: Dataset description. + :paramtype description: str + :keyword structure: Columns that define the structure of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetDataElement. + :paramtype structure: JSON + :keyword schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :paramtype schema: JSON + :keyword linked_service_name: Linked service reference. Required. + :paramtype linked_service_name: ~azure.mgmt.datafactory.models.LinkedServiceReference + :keyword parameters: Parameters for dataset. + :paramtype parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] + :keyword annotations: List of tags that can be used for describing the Dataset. + :paramtype annotations: list[JSON] + :keyword folder: The folder that this Dataset is in. If not specified, Dataset will appear at + the root level. + :paramtype folder: ~azure.mgmt.datafactory.models.DatasetFolder + :keyword object_api_name: The Salesforce V2 object API name. Type: string (or Expression with + resultType string). + :paramtype object_api_name: JSON + :keyword report_id: The Salesforce V2 report Id. Type: string (or Expression with resultType + string). + :paramtype report_id: JSON + """ + super().__init__( + additional_properties=additional_properties, + description=description, + structure=structure, + schema=schema, + linked_service_name=linked_service_name, + parameters=parameters, + annotations=annotations, + folder=folder, + **kwargs + ) + self.type: str = "SalesforceV2Object" + self.object_api_name = object_api_name + self.report_id = report_id + + +class SalesforceV2Sink(CopySink): # pylint: disable=too-many-instance-attributes + """A copy activity Salesforce V2 sink. All required parameters must be populated in order to send to Azure. 
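The hunk above introduces the Salesforce V2 linked service and object dataset models. The sketch below is not part of this diff; it assumes a `DataFactoryManagementClient` built from `azure-identity`, and the subscription ID, resource group, factory name, Salesforce domain, and OAuth client credentials are placeholders.

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (
    DatasetResource,
    LinkedServiceReference,
    LinkedServiceResource,
    SalesforceV2LinkedService,
    SalesforceV2ObjectDataset,
    SecureString,
)

# Placeholder scope values.
client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
resource_group, factory = "<resource-group>", "<factory-name>"

# Linked service: OAuth 2.0 client-credentials flow. apiVersion must be >= 47.0
# because the V2 connector targets the Salesforce Bulk API 2.0.
linked_service = SalesforceV2LinkedService(
    environment_url="https://<domain>.my.salesforce.com",
    client_id="<client-id>",
    client_secret=SecureString(value="<client-secret>"),
    api_version="53.0",
)
client.linked_services.create_or_update(
    resource_group, factory, "SalesforceV2Example",
    LinkedServiceResource(properties=linked_service),
)

# Dataset bound to that linked service by Salesforce object API name.
dataset = SalesforceV2ObjectDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="SalesforceV2Example"
    ),
    object_api_name="Account",
)
client.datasets.create_or_update(
    resource_group, factory, "SalesforceV2Account",
    DatasetResource(properties=dataset),
)
```

The SalesforceServiceCloudV2 variants expose the same typeProperties (environmentUrl, clientId, clientSecret, apiVersion), so the same pattern should apply to them.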
@@ -52065,7 +53164,7 @@ class SalesforceSink(CopySink): # pylint: disable=too-many-instance-attributes :vartype disable_metrics_collection: JSON :ivar write_behavior: The write behavior for the operation. Default is Insert. Known values are: "Insert" and "Upsert". - :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :vartype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceV2SinkWriteBehavior :ivar external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). :vartype external_id_field_name: JSON @@ -52106,7 +53205,7 @@ def __init__( sink_retry_wait: Optional[JSON] = None, max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, - write_behavior: Optional[Union[str, "_models.SalesforceSinkWriteBehavior"]] = None, + write_behavior: Optional[Union[str, "_models.SalesforceV2SinkWriteBehavior"]] = None, external_id_field_name: Optional[JSON] = None, ignore_null_values: Optional[JSON] = None, **kwargs: Any @@ -52135,7 +53234,7 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword write_behavior: The write behavior for the operation. Default is Insert. Known values are: "Insert" and "Upsert". - :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSinkWriteBehavior + :paramtype write_behavior: str or ~azure.mgmt.datafactory.models.SalesforceV2SinkWriteBehavior :keyword external_id_field_name: The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). :paramtype external_id_field_name: JSON @@ -52158,14 +53257,14 @@ def __init__( disable_metrics_collection=disable_metrics_collection, **kwargs ) - self.type: str = "SalesforceSink" + self.type: str = "SalesforceV2Sink" self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name self.ignore_null_values = ignore_null_values -class SalesforceSource(TabularSource): - """A copy activity Salesforce source. +class SalesforceV2Source(TabularSource): + """A copy activity Salesforce V2 source. All required parameters must be populated in order to send to Azure. @@ -52192,10 +53291,10 @@ class SalesforceSource(TabularSource): :ivar additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). :vartype additional_columns: JSON - :ivar query: Database query. Type: string (or Expression with resultType string). - :vartype query: JSON - :ivar read_behavior: The read behavior for the operation. Default is Query. Allowed values: - Query/QueryAll. Type: string (or Expression with resultType string). + :ivar soql_query: Database query. Type: string (or Expression with resultType string). + :vartype soql_query: JSON + :ivar read_behavior: The read behavior for the operation. Default is query. Allowed values: + query/queryAll. Type: string (or Expression with resultType string). 
:vartype read_behavior: JSON """ @@ -52212,7 +53311,7 @@ class SalesforceSource(TabularSource): "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "query_timeout": {"key": "queryTimeout", "type": "object"}, "additional_columns": {"key": "additionalColumns", "type": "object"}, - "query": {"key": "query", "type": "object"}, + "soql_query": {"key": "SOQLQuery", "type": "object"}, "read_behavior": {"key": "readBehavior", "type": "object"}, } @@ -52226,7 +53325,7 @@ def __init__( disable_metrics_collection: Optional[JSON] = None, query_timeout: Optional[JSON] = None, additional_columns: Optional[JSON] = None, - query: Optional[JSON] = None, + soql_query: Optional[JSON] = None, read_behavior: Optional[JSON] = None, **kwargs: Any ) -> None: @@ -52252,10 +53351,10 @@ def __init__( :keyword additional_columns: Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). :paramtype additional_columns: JSON - :keyword query: Database query. Type: string (or Expression with resultType string). - :paramtype query: JSON - :keyword read_behavior: The read behavior for the operation. Default is Query. Allowed values: - Query/QueryAll. Type: string (or Expression with resultType string). + :keyword soql_query: Database query. Type: string (or Expression with resultType string). + :paramtype soql_query: JSON + :keyword read_behavior: The read behavior for the operation. Default is query. Allowed values: + query/queryAll. Type: string (or Expression with resultType string). :paramtype read_behavior: JSON """ super().__init__( @@ -52268,8 +53367,8 @@ def __init__( additional_columns=additional_columns, **kwargs ) - self.type: str = "SalesforceSource" - self.query = query + self.type: str = "SalesforceV2Source" + self.soql_query = soql_query self.read_behavior = read_behavior @@ -57056,6 +58155,9 @@ class SftpWriteSettings(StoreWriteSettings): :vartype disable_metrics_collection: JSON :ivar copy_behavior: The type of copy behavior for copy sink. :vartype copy_behavior: JSON + :ivar metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :vartype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] :ivar operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). :vartype operation_timeout: JSON @@ -57075,6 +58177,7 @@ class SftpWriteSettings(StoreWriteSettings): "max_concurrent_connections": {"key": "maxConcurrentConnections", "type": "object"}, "disable_metrics_collection": {"key": "disableMetricsCollection", "type": "object"}, "copy_behavior": {"key": "copyBehavior", "type": "object"}, + "metadata": {"key": "metadata", "type": "[MetadataItem]"}, "operation_timeout": {"key": "operationTimeout", "type": "object"}, "use_temp_file_rename": {"key": "useTempFileRename", "type": "object"}, } @@ -57086,6 +58189,7 @@ def __init__( max_concurrent_connections: Optional[JSON] = None, disable_metrics_collection: Optional[JSON] = None, copy_behavior: Optional[JSON] = None, + metadata: Optional[List["_models.MetadataItem"]] = None, operation_timeout: Optional[JSON] = None, use_temp_file_rename: Optional[JSON] = None, **kwargs: Any @@ -57102,6 +58206,9 @@ def __init__( :paramtype disable_metrics_collection: JSON :keyword copy_behavior: The type of copy behavior for copy sink. 
:paramtype copy_behavior: JSON + :keyword metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :paramtype metadata: list[~azure.mgmt.datafactory.models.MetadataItem] :keyword operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). :paramtype operation_timeout: JSON @@ -57115,6 +58222,7 @@ def __init__( max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, + metadata=metadata, **kwargs ) self.type: str = "SftpWriteSettings" @@ -59145,7 +60253,8 @@ class SqlDWSource(TabularSource): # pylint: disable=too-many-instance-attribute is ReadCommitted. Type: string (or Expression with resultType string). :vartype isolation_level: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -59228,7 +60337,8 @@ def __init__( value is ReadCommitted. Type: string (or Expression with resultType string). :paramtype isolation_level: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -59332,8 +60442,8 @@ class SqlMISink(CopySink): # pylint: disable=too-many-instance-attributes :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). :vartype sql_writer_use_table_lock: JSON - :ivar write_behavior: White behavior when copying data into azure SQL MI. Type: - SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :ivar write_behavior: White behavior when copying data into azure SQL MI. Type: string (or + Expression with resultType string). :vartype write_behavior: JSON :ivar upsert_settings: SQL upsert settings. :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings @@ -59429,8 +60539,8 @@ def __init__( :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). :paramtype sql_writer_use_table_lock: JSON - :keyword write_behavior: White behavior when copying data into azure SQL MI. Type: - SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :keyword write_behavior: White behavior when copying data into azure SQL MI. Type: string (or + Expression with resultType string). :paramtype write_behavior: JSON :keyword upsert_settings: SQL upsert settings. 
:paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings @@ -59501,7 +60611,8 @@ class SqlMISource(TabularSource): # pylint: disable=too-many-instance-attribute :ivar produce_additional_types: Which additional types to produce. :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -59587,7 +60698,8 @@ def __init__( :keyword produce_additional_types: Which additional types to produce. :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -59830,8 +60942,8 @@ class SqlServerSink(CopySink): # pylint: disable=too-many-instance-attributes :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). :vartype sql_writer_use_table_lock: JSON - :ivar write_behavior: Write behavior when copying data into sql server. Type: - SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :ivar write_behavior: Write behavior when copying data into sql server. Type: string (or + Expression with resultType string). :vartype write_behavior: JSON :ivar upsert_settings: SQL upsert settings. :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings @@ -59927,8 +61039,8 @@ def __init__( :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). :paramtype sql_writer_use_table_lock: JSON - :keyword write_behavior: Write behavior when copying data into sql server. Type: - SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + :keyword write_behavior: Write behavior when copying data into sql server. Type: string (or + Expression with resultType string). :paramtype write_behavior: JSON :keyword upsert_settings: SQL upsert settings. :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings @@ -59999,7 +61111,8 @@ class SqlServerSource(TabularSource): # pylint: disable=too-many-instance-attri :ivar produce_additional_types: Which additional types to produce. :vartype produce_additional_types: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. 
:vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -60085,7 +61198,8 @@ def __init__( :keyword produce_additional_types: Which additional types to produce. :paramtype produce_additional_types: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. :paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -60401,8 +61515,8 @@ class SqlSink(CopySink): # pylint: disable=too-many-instance-attributes :ivar sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). :vartype sql_writer_use_table_lock: JSON - :ivar write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum (or - Expression with resultType SqlWriteBehaviorEnum). + :ivar write_behavior: Write behavior when copying data into sql. Type: string (or Expression + with resultType string). :vartype write_behavior: JSON :ivar upsert_settings: SQL upsert settings. :vartype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings @@ -60498,8 +61612,8 @@ def __init__( :keyword sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). :paramtype sql_writer_use_table_lock: JSON - :keyword write_behavior: Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum - (or Expression with resultType SqlWriteBehaviorEnum). + :keyword write_behavior: Write behavior when copying data into sql. Type: string (or Expression + with resultType string). :paramtype write_behavior: JSON :keyword upsert_settings: SQL upsert settings. :paramtype upsert_settings: ~azure.mgmt.datafactory.models.SqlUpsertSettings @@ -60568,7 +61682,8 @@ class SqlSource(TabularSource): # pylint: disable=too-many-instance-attributes is ReadCommitted. Type: string (or Expression with resultType string). :vartype isolation_level: JSON :ivar partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :vartype partition_option: JSON :ivar partition_settings: The settings that will be leveraged for Sql source partitioning. :vartype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -60650,7 +61765,8 @@ def __init__( value is ReadCommitted. Type: string (or Expression with resultType string). :paramtype isolation_level: JSON :keyword partition_option: The partition mechanism that will be used for Sql read in parallel. - Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or + Expression with resultType string). :paramtype partition_option: JSON :keyword partition_settings: The settings that will be leveraged for Sql source partitioning. 
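Because the reworded docstrings call out that ``partition_option`` may also be an Expression with resultType string, here is a small sketch of driving it from a pipeline parameter (the parameter name is hypothetical):

```python
from azure.mgmt.datafactory.models import SqlSource

# partition_option resolved at run time from a pipeline parameter (hypothetical name).
source = SqlSource(
    partition_option={
        "value": "@pipeline().parameters.sqlPartitionOption",
        "type": "Expression",
    },
)
```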
:paramtype partition_settings: ~azure.mgmt.datafactory.models.SqlPartitionSettings @@ -65658,7 +66774,7 @@ class WebActivity(ExecutionActivity): # pylint: disable=too-many-instance-attri :ivar headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :vartype headers: JSON + :vartype headers: dict[str, str] :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). :vartype body: JSON @@ -65666,6 +66782,15 @@ class WebActivity(ExecutionActivity): # pylint: disable=too-many-instance-attri :vartype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :ivar disable_cert_validation: When set to true, Certificate validation will be disabled. :vartype disable_cert_validation: bool + :ivar http_request_timeout: Timeout for the HTTP request to get a response. Format is in + TimeSpan (hh:mm:ss). This value is the timeout to get a response, not the activity timeout. The + default value is 00:01:00 (1 minute). The range is from 1 to 10 minutes. + :vartype http_request_timeout: JSON + :ivar turn_off_async: Option to disable invoking HTTP GET on location given in response header + of a HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in + response header. If set false then continues to invoke HTTP GET call on location given in http + response headers. + :vartype turn_off_async: bool :ivar datasets: List of datasets passed to web endpoint. :vartype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] :ivar linked_services: List of linked services passed to web endpoint. 
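A minimal sketch of the two new ``WebActivity`` options and the retyped ``headers`` map; the activity name, URL, and values are placeholders, not from this PR:

```python
from azure.mgmt.datafactory.models import WebActivity

activity = WebActivity(
    name="CallEndpoint",                           # placeholder activity name
    method="POST",
    url="https://example.com/api",                 # placeholder endpoint
    headers={"Content-Type": "application/json"},  # now dict[str, str]
    body='{"hello": "world"}',
    http_request_timeout="00:02:00",               # response timeout, 1-10 minutes
    turn_off_async=True,                           # do not poll the 202 Location header
)
```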
@@ -65694,10 +66819,12 @@ class WebActivity(ExecutionActivity): # pylint: disable=too-many-instance-attri "policy": {"key": "policy", "type": "ActivityPolicy"}, "method": {"key": "typeProperties.method", "type": "str"}, "url": {"key": "typeProperties.url", "type": "object"}, - "headers": {"key": "typeProperties.headers", "type": "object"}, + "headers": {"key": "typeProperties.headers", "type": "{str}"}, "body": {"key": "typeProperties.body", "type": "object"}, "authentication": {"key": "typeProperties.authentication", "type": "WebActivityAuthentication"}, "disable_cert_validation": {"key": "typeProperties.disableCertValidation", "type": "bool"}, + "http_request_timeout": {"key": "typeProperties.httpRequestTimeout", "type": "object"}, + "turn_off_async": {"key": "typeProperties.turnOffAsync", "type": "bool"}, "datasets": {"key": "typeProperties.datasets", "type": "[DatasetReference]"}, "linked_services": {"key": "typeProperties.linkedServices", "type": "[LinkedServiceReference]"}, "connect_via": {"key": "typeProperties.connectVia", "type": "IntegrationRuntimeReference"}, @@ -65717,10 +66844,12 @@ def __init__( user_properties: Optional[List["_models.UserProperty"]] = None, linked_service_name: Optional["_models.LinkedServiceReference"] = None, policy: Optional["_models.ActivityPolicy"] = None, - headers: Optional[JSON] = None, + headers: Optional[Dict[str, str]] = None, body: Optional[JSON] = None, authentication: Optional["_models.WebActivityAuthentication"] = None, disable_cert_validation: Optional[bool] = None, + http_request_timeout: Optional[JSON] = None, + turn_off_async: Optional[bool] = None, datasets: Optional[List["_models.DatasetReference"]] = None, linked_services: Optional[List["_models.LinkedServiceReference"]] = None, connect_via: Optional["_models.IntegrationRuntimeReference"] = None, @@ -65758,7 +66887,7 @@ def __init__( :keyword headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :paramtype headers: JSON + :paramtype headers: dict[str, str] :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). :paramtype body: JSON @@ -65766,6 +66895,15 @@ def __init__( :paramtype authentication: ~azure.mgmt.datafactory.models.WebActivityAuthentication :keyword disable_cert_validation: When set to true, Certificate validation will be disabled. :paramtype disable_cert_validation: bool + :keyword http_request_timeout: Timeout for the HTTP request to get a response. Format is in + TimeSpan (hh:mm:ss). This value is the timeout to get a response, not the activity timeout. The + default value is 00:01:00 (1 minute). The range is from 1 to 10 minutes. + :paramtype http_request_timeout: JSON + :keyword turn_off_async: Option to disable invoking HTTP GET on location given in response + header of a HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given + in response header. If set false then continues to invoke HTTP GET call on location given in + http response headers. + :paramtype turn_off_async: bool :keyword datasets: List of datasets passed to web endpoint. :paramtype datasets: list[~azure.mgmt.datafactory.models.DatasetReference] :keyword linked_services: List of linked services passed to web endpoint. 
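The serialization key for ``headers`` changes from ``"object"`` to ``"{str}"``, so the value is emitted as a plain string map under ``typeProperties.headers``. Assuming the generated ``Model.serialize()`` helper behaves as in earlier releases, the wire shape can be checked as in this sketch (placeholder name and URL):

```python
from azure.mgmt.datafactory.models import WebActivity

activity = WebActivity(
    name="CallEndpoint",                   # placeholder name
    method="GET",
    url="https://example.com/health",      # placeholder endpoint
    headers={"Accept-Language": "en-us"},
    turn_off_async=True,
)

payload = activity.serialize()
print(payload["typeProperties"]["headers"])       # {'Accept-Language': 'en-us'}
print(payload["typeProperties"]["turnOffAsync"])  # True
```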
@@ -65792,6 +66930,8 @@ def __init__( self.body = body self.authentication = authentication self.disable_cert_validation = disable_cert_validation + self.http_request_timeout = http_request_timeout + self.turn_off_async = turn_off_async self.datasets = datasets self.linked_services = linked_services self.connect_via = connect_via @@ -66089,7 +67229,7 @@ class WebHookActivity(ControlActivity): # pylint: disable=too-many-instance-att :ivar headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :vartype headers: JSON + :vartype headers: dict[str, str] :ivar body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). :vartype body: JSON @@ -66122,7 +67262,7 @@ class WebHookActivity(ControlActivity): # pylint: disable=too-many-instance-att "method": {"key": "typeProperties.method", "type": "str"}, "url": {"key": "typeProperties.url", "type": "object"}, "timeout": {"key": "typeProperties.timeout", "type": "str"}, - "headers": {"key": "typeProperties.headers", "type": "object"}, + "headers": {"key": "typeProperties.headers", "type": "{str}"}, "body": {"key": "typeProperties.body", "type": "object"}, "authentication": {"key": "typeProperties.authentication", "type": "WebActivityAuthentication"}, "report_status_on_call_back": {"key": "typeProperties.reportStatusOnCallBack", "type": "object"}, @@ -66142,7 +67282,7 @@ def __init__( user_properties: Optional[List["_models.UserProperty"]] = None, policy: Optional["_models.SecureInputOutputPolicy"] = None, timeout: Optional[str] = None, - headers: Optional[JSON] = None, + headers: Optional[Dict[str, str]] = None, body: Optional[JSON] = None, authentication: Optional["_models.WebActivityAuthentication"] = None, report_status_on_call_back: Optional[JSON] = None, @@ -66181,7 +67321,7 @@ def __init__( :keyword headers: Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - :paramtype headers: JSON + :paramtype headers: dict[str, str] :keyword body: Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). :paramtype body: JSON
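``WebHookActivity`` picks up the same ``headers`` retyping; a short sketch with placeholder name, URL, and payload:

```python
from azure.mgmt.datafactory.models import WebHookActivity

hook = WebHookActivity(
    name="NotifyCallback",                         # placeholder name
    method="POST",
    url="https://example.com/hook",                # placeholder callback endpoint
    timeout="00:10:00",
    headers={"Content-Type": "application/json"},  # dict[str, str] instead of raw JSON
    body='{"status": "started"}',
)
```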