diff --git a/src/SDKs/DataFactory/DataFactory.Tests/JsonSamples/LinkedServiceJsonSamples.cs b/src/SDKs/DataFactory/DataFactory.Tests/JsonSamples/LinkedServiceJsonSamples.cs index 7893ed3075ee6..cf80265945c7a 100644 --- a/src/SDKs/DataFactory/DataFactory.Tests/JsonSamples/LinkedServiceJsonSamples.cs +++ b/src/SDKs/DataFactory/DataFactory.Tests/JsonSamples/LinkedServiceJsonSamples.cs @@ -1860,6 +1860,11 @@ public class LinkedServiceJsonSamples : JsonSampleCollection ""PropertyBagPropertyName1"": ""PropertyBagValue1"", ""propertyBagPropertyName2"": ""PropertyBagValue2"", ""dateTime1"": ""2015-04-12T12:13:14Z"", - } + }, + ""retentionTimeInDays"": 35 } } ] diff --git a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AzureDatabricksLinkedService.cs b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AzureDatabricksLinkedService.cs index 74354d688d223..432fd14872de1 100644 --- a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AzureDatabricksLinkedService.cs +++ b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/AzureDatabricksLinkedService.cs @@ -68,11 +68,20 @@ public AzureDatabricksLinkedService() /// user-specified Spark environment variables key-value pairs. /// Additional tags for cluster /// resources. + /// The driver node type for the + /// new cluster. Type: string (or Expression with resultType + /// string). + /// User-defined initialization + /// scripts for the new cluster. Type: array of strings (or Expression + /// with resultType array of strings). + /// Enable the elastic disk + /// on the new cluster. Type: boolean (or Expression with resultType + /// boolean). /// The encrypted credential used for /// authentication. Credentials are encrypted using the integration /// runtime credential manager. Type: string (or Expression with /// resultType string). 
- public AzureDatabricksLinkedService(object domain, SecretBase accessToken, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), object existingClusterId = default(object), object newClusterVersion = default(object), object newClusterNumOfWorker = default(object), object newClusterNodeType = default(object), IDictionary newClusterSparkConf = default(IDictionary), IDictionary newClusterSparkEnvVars = default(IDictionary), IDictionary newClusterCustomTags = default(IDictionary), object encryptedCredential = default(object)) + public AzureDatabricksLinkedService(object domain, SecretBase accessToken, IDictionary additionalProperties = default(IDictionary), IntegrationRuntimeReference connectVia = default(IntegrationRuntimeReference), string description = default(string), IDictionary parameters = default(IDictionary), IList annotations = default(IList), object existingClusterId = default(object), object newClusterVersion = default(object), object newClusterNumOfWorker = default(object), object newClusterNodeType = default(object), IDictionary newClusterSparkConf = default(IDictionary), IDictionary newClusterSparkEnvVars = default(IDictionary), IDictionary newClusterCustomTags = default(IDictionary), object newClusterDriverNodeType = default(object), object newClusterInitScripts = default(object), object newClusterEnableElasticDisk = default(object), object encryptedCredential = default(object)) : base(additionalProperties, connectVia, description, parameters, annotations) { Domain = domain; @@ -84,6 +93,9 @@ public AzureDatabricksLinkedService() NewClusterSparkConf = newClusterSparkConf; NewClusterSparkEnvVars = newClusterSparkEnvVars; NewClusterCustomTags = newClusterCustomTags; + NewClusterDriverNodeType = newClusterDriverNodeType; + NewClusterInitScripts = 
newClusterInitScripts; + NewClusterEnableElasticDisk = newClusterEnableElasticDisk; EncryptedCredential = encryptedCredential; CustomInit(); } @@ -160,6 +172,28 @@ public AzureDatabricksLinkedService() [JsonProperty(PropertyName = "typeProperties.newClusterCustomTags")] public IDictionary NewClusterCustomTags { get; set; } + /// + /// Gets or sets the driver node type for the new cluster. Type: string + /// (or Expression with resultType string). + /// + [JsonProperty(PropertyName = "typeProperties.newClusterDriverNodeType")] + public object NewClusterDriverNodeType { get; set; } + + /// + /// Gets or sets user-defined initialization scripts for the new + /// cluster. Type: array of strings (or Expression with resultType + /// array of strings). + /// + [JsonProperty(PropertyName = "typeProperties.newClusterInitScripts")] + public object NewClusterInitScripts { get; set; } + + /// + /// Gets or sets enable the elastic disk on the new cluster. Type: + /// boolean (or Expression with resultType boolean). + /// + [JsonProperty(PropertyName = "typeProperties.newClusterEnableElasticDisk")] + public object NewClusterEnableElasticDisk { get; set; } + /// /// Gets or sets the encrypted credential used for authentication. /// Credentials are encrypted using the integration runtime credential diff --git a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/CustomActivity.cs b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/CustomActivity.cs index 81f3ea75a0f40..ad877000f9ac9 100644 --- a/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/CustomActivity.cs +++ b/src/SDKs/DataFactory/Management.DataFactory/Generated/Models/CustomActivity.cs @@ -54,7 +54,10 @@ public CustomActivity() /// is no restriction on the keys or values that can be used. The user /// specified custom activity has the full responsibility to consume /// and interpret the content defined. 
- public CustomActivity(string name, object command, IDictionary additionalProperties = default(IDictionary), string description = default(string), IList dependsOn = default(IList), IList userProperties = default(IList), LinkedServiceReference linkedServiceName = default(LinkedServiceReference), ActivityPolicy policy = default(ActivityPolicy), LinkedServiceReference resourceLinkedService = default(LinkedServiceReference), object folderPath = default(object), CustomActivityReferenceObject referenceObjects = default(CustomActivityReferenceObject), IDictionary extendedProperties = default(IDictionary)) + /// The retention time for the files + /// submitted for custom activity. Type: double (or Expression with + /// resultType double). + public CustomActivity(string name, object command, IDictionary additionalProperties = default(IDictionary), string description = default(string), IList dependsOn = default(IList), IList userProperties = default(IList), LinkedServiceReference linkedServiceName = default(LinkedServiceReference), ActivityPolicy policy = default(ActivityPolicy), LinkedServiceReference resourceLinkedService = default(LinkedServiceReference), object folderPath = default(object), CustomActivityReferenceObject referenceObjects = default(CustomActivityReferenceObject), IDictionary extendedProperties = default(IDictionary), object retentionTimeInDays = default(object)) : base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy) { Command = command; @@ -62,6 +65,7 @@ public CustomActivity() FolderPath = folderPath; ReferenceObjects = referenceObjects; ExtendedProperties = extendedProperties; + RetentionTimeInDays = retentionTimeInDays; CustomInit(); } @@ -105,6 +109,13 @@ public CustomActivity() [JsonProperty(PropertyName = "typeProperties.extendedProperties")] public IDictionary ExtendedProperties { get; set; } + /// + /// Gets or sets the retention time for the files submitted for custom + /// activity. 
Type: double (or Expression with resultType double). + /// + [JsonProperty(PropertyName = "typeProperties.retentionTimeInDays")] + public object RetentionTimeInDays { get; set; } + /// /// Validate the object. /// diff --git a/src/SDKs/DataFactory/Management.DataFactory/Generated/SdkInfo_DataFactoryManagementClient.cs b/src/SDKs/DataFactory/Management.DataFactory/Generated/SdkInfo_DataFactoryManagementClient.cs index c095b33afc062..f69b2008b84ca 100644 --- a/src/SDKs/DataFactory/Management.DataFactory/Generated/SdkInfo_DataFactoryManagementClient.cs +++ b/src/SDKs/DataFactory/Management.DataFactory/Generated/SdkInfo_DataFactoryManagementClient.cs @@ -39,10 +39,10 @@ public static IEnumerable> ApiInfo_DataFactoryMana // BEGIN: Code Generation Metadata Section public static readonly String AutoRestVersion = "latest"; public static readonly String AutoRestBootStrapperVersion = "autorest@2.0.4283"; - public static readonly String AutoRestCmdExecuted = "cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=latest --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=C:\\Repos\\azure-sdk-for-net\\src\\SDKs"; + public static readonly String AutoRestCmdExecuted = "cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=latest --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=F:\\git\\azure-sdk-for-net\\src\\SDKs"; public static readonly String GithubForkName = "Azure"; public static readonly String GithubBranchName = "master"; - public static readonly String GithubCommidId = "29f93fd613a783b7cce749669f24bf8e25aba96a"; + public static readonly String GithubCommidId = "5df40b9b08b77fe7b78f90f6b7c470600b8ea063"; public static readonly String CodeGenerationErrors = ""; public static readonly String GithubRepoName = "azure-rest-api-specs"; // END: Code 
Generation Metadata Section diff --git a/src/SDKs/DataFactory/Management.DataFactory/Microsoft.Azure.Management.DataFactory.csproj b/src/SDKs/DataFactory/Management.DataFactory/Microsoft.Azure.Management.DataFactory.csproj index 27e6c952db524..6a6e6d74dc7a5 100644 --- a/src/SDKs/DataFactory/Management.DataFactory/Microsoft.Azure.Management.DataFactory.csproj +++ b/src/SDKs/DataFactory/Management.DataFactory/Microsoft.Azure.Management.DataFactory.csproj @@ -10,10 +10,31 @@ Microsoft.Azure.Management.DataFactory Microsoft Azure resource management;Data Factory;ADF; diff --git a/src/SDKs/DataFactory/Management.DataFactory/changelog.md b/src/SDKs/DataFactory/Management.DataFactory/changelog.md index 24a73b11e547c..005e135bab2dc 100644 --- a/src/SDKs/DataFactory/Management.DataFactory/changelog.md +++ b/src/SDKs/DataFactory/Management.DataFactory/changelog.md @@ -1,29 +1,45 @@ # Changelog for the Azure Data Factory V2 .NET SDK ## Current version -## Version 3.0.2 - ### Feature Additions - - Added support for RestService Source - - Added support for SAP BW Open Hub Source - - Added support for collectionReference - - Added support for recovery mode for pipeline runs ## Version 3.0.1 - ### Feature Additions - Fixed AzureFunctionActivity + - Added support for RestService Source + - Added support for SAP BW Open Hub Source + - Added support for collectionReference + - Added recovery mode for more advanced pipeline run retry capabilities (i.e. 
from a specific activity) + - Added newClusterDriverNodeType, newClusterInitScripts, and newClusterEnableElasticDisk properties to DataBricks linked service + - Added retentionTimeInDays property to CustomActivity + - New connectors supported as Copy source: + * Office365 + * Native MongoDB + * CosmosDB MongoDB API + * ADLS Gen2 + * Dynamics AX + * Azure Data Explorer + * Oracle Service Cloud + * GoogleAdWords + - New connector supported as copy sink: + * ADLS Gen2 + * CosmosDB MongoDB API + * Azure Data Explorer + - Added support for incremental copy of files based on the lastModifiedTime for S3, File and Blob + - Added support to copy data from ADLS Gen1 to ADLS Gen2 with ACL + - Added support for ServiceUrl in the existing S3 linked service + - Added support for AADServicePrincipal authentication in OData linked service + - Added support for maxConcurrentConnections in copy source and sink ## Version 3.0.0 - ### Feature Additions - Added new APIs: * get DataPlane access * get and refresh Integration Runtime object metadata * get feature value - Added new activity and linked service types to support Azure Functions - - Added support for HDIngsight cluster with Enterprise Sercurity package - Updated exisitng activities and datasets: + - Added support for HDInsight cluster with Enterprise Security package + - Updated existing activities and datasets: * Added 'tableName' property in datasets * Refactored Delete activity payload by adding more properties * Added support for expressions for SSIS activity property 'type' @@ -31,63 +47,55 @@ * Added 'schema' property to datasets ## Version 2.3.0 - ### Feature Additions - Added variables support to Pipelines - - Added new AppendVariable and SetVariable activities - - Added support for SecureInput in activities - - Added ScriptActions to on demand HDI linked service - - Added support for recursive Cancel operation on runs - - Added TumblingWindowRerunTrigger API + - Added new AppendVariable and SetVariable activities 
+ - Added support for SecureInput in activities + - Added ScriptActions to on demand HDI linked service + - Added support for recursive Cancel operation on runs + - Added TumblingWindowRerunTrigger API ## Version 2.2.0 - ### Feature Additions - - Added folders to Pipeline and Dataset - - Added TumblingWindowTrigger dependsOn, offset and size properties - - Added new API to get GitHub access token - - Added new property on Databricks linked Service to set Spark environment variables - - Fixed the casing in JSON for FactoryGitHubConfiguration + - Added folders to Pipeline and Dataset + - Added TumblingWindowTrigger dependsOn, offset and size properties + - Added new API to get GitHub access token + - Added new property on Databricks linked Service to set Spark environment variables + - Fixed the casing in JSON for FactoryGitHubConfiguration ## Version 2.1.0 - ### Feature Additions * Added support for AzureBlob AAD Authentication * Added support for AzureStorage 2 new Linked Service type: AzureBlobStorage, AzureTableStorage ## Version 2.0.0 - ### Feature Additions - ### Breaking Changes * Updated UserProperties type in Activities ## Version 1.1.0 - ### Feature Additions * Added support for sharing self-hosted integration runtime across data factories and subscriptions * Added support for Databricks Spark Jar and Databricks Spark Python activities ## Version 1.0.0 - ### Feature Additions * Azure Data Factory new capabilities now fall under General Availability SLA. ADF has made cloud data integration easier than ever before. Build, schedule and manage data integration at scale wherever your data lives, in cloud or on-premises, with enterprise-grade security. Accelerate your data integration projects with over 70 data source connectors available, please refer to https://docs.microsoft.com/en-us/azure/data-factory/copy-activity-overview. Transform raw data into finished, shaped data that is ready for consumption by BI tools or custom applications. 
Easily lift your SQL Server Integration Services (SSIS) packages to Azure and let ADF manage your resources for you so you can increase productivity and lower TCO, please refer to https://docs.microsoft.com/en-us/sql/integration-services/lift-shift/ssis-azure-lift-shift-ssis-packages-overview?view=sql-server-2017. Meet your security and compliance needs while taking advantage of extensive capabilities and paying only for what you use. The ADF GA SDK changes include the following: - - The API 'removeNode’ on IR has been removed and replaced with DELETE API on IR node. - - The API 'POST pipelineRuns’ was renamed to 'POST queryPipelineRuns’ and 'PipelineRunFilterParameters’ was renamed to 'RunFilterParameters’. - - The API 'GET activityRuns’ using pipeline run id has been replaced with 'POST queryActivityRuns’. It also takes RunFilterParameters object in the body to provide more options to query and order the result. - - The API 'GET triggerRuns’ has been replaced with 'POST queryTriggerRuns’ and was moved to factory scope. This one too takes RunFilterParameters object in the body similar to previous query runs APIs. - - The API 'cancelPipelineRun’ has been moved to PipelineRuns operations and renamed to 'Cancel’. - - The property 'vstsConfiguration’ on factory resource has been renamed to repoConfiguration. - - Pipeline has new properties called 'userProperties’ which can be used to improve the run monitoring experience - - The error response format has been changed. It is now compliant with other Azure ARM services. Before the API-s were returning ErrorResponse object with code, message, target and details. Now, it returns CloudError object with another 'error’ object nested inside that contains code, message, target and details. - - Added If-Match header support on put calls and If-None-Match header support for get calls for ADF resources and sub resources. - - The response of 'PATCH' API on IR has been fixed to return the IR resource. 
+ - The API 'removeNode’ on IR has been removed and replaced with DELETE API on IR node. + - The API 'POST pipelineRuns’ was renamed to 'POST queryPipelineRuns’ and 'PipelineRunFilterParameters’ was renamed to 'RunFilterParameters’. + - The API 'GET activityRuns’ using pipeline run id has been replaced with 'POST queryActivityRuns’. It also takes RunFilterParameters object in the body to provide more options to query and order the result. + - The API 'GET triggerRuns’ has been replaced with 'POST queryTriggerRuns’ and was moved to factory scope. This one too takes RunFilterParameters object in the body similar to previous query runs APIs. + - The API 'cancelPipelineRun’ has been moved to PipelineRuns operations and renamed to 'Cancel’. + - The property 'vstsConfiguration’ on factory resource has been renamed to repoConfiguration. + - Pipeline has new properties called 'userProperties’ which can be used to improve the run monitoring experience + - The error response format has been changed. It is now compliant with other Azure ARM services. Before the API-s were returning ErrorResponse object with code, message, target and details. Now, it returns CloudError object with another 'error’ object nested inside that contains code, message, target and details. + - Added If-Match header support on put calls and If-None-Match header support for get calls for ADF resources and sub resources. + - The response of 'PATCH' API on IR has been fixed to return the IR resource. 
- The 'cloudDataMovementUnits' property of Copy activity has been renamed to 'dataIntegrationUnits' * Remove maxParallelExecutionsPerNode limitation ## Version 0.8.0-preview - ### Feature Additions * Added Configure factory repository operation * Updated QuickBooks LinkedService to expose consumerKey and consumerSecret properties @@ -95,7 +103,6 @@ * Added Blob Events trigger ## Version 0.7.0-preview - ### Feature Additions * Added execution parameters and connection managers property on ExecuteSSISPackage Activity * Updated PostgreSql, MySql llinked service to use full connection string instead of server, database, schema, username and password @@ -104,7 +111,6 @@ * Added LinkedService, Dataset, CopySource for Responsys ## Version 0.6.0-preview - ### Feature Additions * Added new AzureDatabricks LinkedService and DatabricksNotebook Activity * Added headNodeSize and dataNodeSize properties in HDInsightOnDemand LinkedService @@ -115,7 +121,6 @@ * Added Linked Service Parameters support ## Version 0.5.0-preview - ### Feature Additions * Enable AAD auth via service principal and management service identity for Azure SQL DB/DW linked service types * Support integration runtime sharing across subscription and data factory @@ -125,14 +130,12 @@ * Add LinkedService, Dataset, CopySource for Vertica and Netezza ## Version 0.4.0-preview - ### Feature Additions * Add readBehavior to Salesforce Source * Enable Azure Key Vault support for all data store linked services * Add license type property to Azure SSIS integration runtime ## Version 0.3.0-preview - ### Feature Additions * Add SAP Cloud For Customer Source * Add SAP Cloud For Customer Dataset @@ -145,7 +148,6 @@ * Add integration runtime naming validation ## Version 0.2.1-preview - ### Feature Additions * Cancel pipeline run api. * Add AzureMySql linked service. @@ -158,6 +160,5 @@ * Support providing Salesforce passwords and security tokens as SecureStrings or as secrets in Azure Key Vault. 
## Version 0.2.0-preview - ### Feature Additions * Initial public release of the Azure Data Factory V2 .NET SDK. diff --git a/src/SDKs/_metadata/datafactory_resource-manager.txt b/src/SDKs/_metadata/datafactory_resource-manager.txt index e6b7d8c7ee245..8e7b590b5246a 100644 --- a/src/SDKs/_metadata/datafactory_resource-manager.txt +++ b/src/SDKs/_metadata/datafactory_resource-manager.txt @@ -3,12 +3,12 @@ AutoRest installed successfully. Commencing code generation Generating CSharp code Executing AutoRest command -cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=latest --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=C:\Repos\azure-sdk-for-net\src\SDKs -2019-02-21 22:01:50 UTC +cmd.exe /c autorest.cmd https://github.com/Azure/azure-rest-api-specs/blob/master/specification/datafactory/resource-manager/readme.md --csharp --version=latest --reflect-api-versions --tag=package-2018-06 --csharp-sdks-folder=F:\git\azure-sdk-for-net\src\SDKs +2019-02-22 19:23:28 UTC Azure-rest-api-specs repository information GitHub fork: Azure Branch: master -Commit: 29f93fd613a783b7cce749669f24bf8e25aba96a +Commit: 5df40b9b08b77fe7b78f90f6b7c470600b8ea063 AutoRest information Requested version: latest Bootstrapper version: autorest@2.0.4283