[DataFactory]Added new features into 8.0.0 (Azure#31988)
* [DataFactory]Added new features into 6.4.0

* [DataFactory]Added new features into 7.0.0

* [DataFactory]Added new features into 8.0.0

* Fix

* fix
Jingshu923 authored and sofiar-msft committed Dec 7, 2022
1 parent 423660b commit ff303ad
Showing 15 changed files with 248 additions and 22 deletions.
@@ -1,5 +1,12 @@
# Changelog for the Azure Data Factory V2 .NET SDK

## Version 8.0.0
### Feature Additions
### Breaking Changes
- Added properties to spark job activity
- Added disablePublish property into FactoryRepoConfiguration
- Added scriptBlockExecutionTimeout property into Script Activity
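
To make these concrete, here is a minimal sketch of the two additions named explicitly above; the C# property names and types (DisablePublish as bool?, ScriptBlockExecutionTimeout as an object taking an ADF-style timeout string) are inferred from the swagger names in the entries and are not confirmed against the generated models.

using Microsoft.Azure.Management.DataFactory.Models;

// disablePublish: turn off manual publish from ADF Studio for a Git-backed factory
// (assumed to surface as a bool? DisablePublish property on FactoryRepoConfiguration).
var repo = new FactoryGitHubConfiguration
{
    AccountName = "contoso",
    RepositoryName = "adf-pipelines",
    CollaborationBranch = "main",
    RootFolder = "/",
    DisablePublish = true
};

// scriptBlockExecutionTimeout: cap how long each script block may run
// (assumed to surface as an object-typed ScriptBlockExecutionTimeout property on
// ScriptActivity, taking a timeout string such as "02:00:00").
var script = new ScriptActivity { Name = "RunCleanupScript" };
script.ScriptBlockExecutionTimeout = "02:00:00";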

## Version 7.0.0
### Feature Additions
### Breaking Changes
@@ -0,0 +1,28 @@
namespace Microsoft.Azure.Management.DataFactory.Models
{
    public partial class FactoryGitHubConfiguration : FactoryRepoConfiguration
    {
        /// <summary>
        /// Initializes a new instance of the FactoryGitHubConfiguration class.
        /// </summary>
        /// <param name="accountName">Account name.</param>
        /// <param name="repositoryName">Repository name.</param>
        /// <param name="collaborationBranch">Collaboration branch.</param>
        /// <param name="rootFolder">Root folder.</param>
        /// <param name="lastCommitId">Last commit id.</param>
        /// <param name="hostName">GitHub Enterprise host name. For example:
        /// `https://github.mydomain.com`</param>
        /// <param name="clientId">GitHub bring your own app client id.</param>
        /// <param name="clientSecret">GitHub bring your own app client secret
        /// information.</param>
        public FactoryGitHubConfiguration(string accountName, string repositoryName, string collaborationBranch, string rootFolder, string lastCommitId, string hostName, string clientId = default(string), GitHubClientSecret clientSecret = default(GitHubClientSecret))
            : base(accountName, repositoryName, collaborationBranch, rootFolder, lastCommitId)
        {
            HostName = hostName;
            ClientId = clientId;
            ClientSecret = clientSecret;
            CustomInit();
        }
    }
}
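
Assuming the overload above is kept so that pre-8.0.0 call sites (which did not pass disablePublish) continue to compile, calling it looks unchanged from 7.x; the argument values below are placeholders.

using Microsoft.Azure.Management.DataFactory.Models;

// Back-compat construction: disablePublish is not part of this overload and is left at its default.
var gitHubRepo = new FactoryGitHubConfiguration(
    accountName: "contoso",
    repositoryName: "adf-pipelines",
    collaborationBranch: "main",
    rootFolder: "/",
    lastCommitId: null,
    hostName: "https://github.contoso.com");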
@@ -0,0 +1,23 @@
namespace Microsoft.Azure.Management.DataFactory.Models
{
    public partial class FactoryVSTSConfiguration : FactoryRepoConfiguration
    {
        /// <summary>
        /// Initializes a new instance of the FactoryVSTSConfiguration class.
        /// </summary>
        /// <param name="accountName">Account name.</param>
        /// <param name="repositoryName">Repository name.</param>
        /// <param name="collaborationBranch">Collaboration branch.</param>
        /// <param name="rootFolder">Root folder.</param>
        /// <param name="projectName">VSTS project name.</param>
        /// <param name="lastCommitId">Last commit id.</param>
        /// <param name="tenantId">VSTS tenant id.</param>
        public FactoryVSTSConfiguration(string accountName, string repositoryName, string collaborationBranch, string rootFolder, string projectName, string lastCommitId, string tenantId)
            : base(accountName, repositoryName, collaborationBranch, rootFolder, lastCommitId)
        {
            ProjectName = projectName;
            TenantId = tenantId;
            CustomInit();
        }
    }
}
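
A similar sketch for the VSTS (Azure DevOps) overload above, wired into a factory definition; that Factory.RepoConfiguration accepts any FactoryRepoConfiguration is an assumption here, and all values are placeholders.

using Microsoft.Azure.Management.DataFactory.Models;

var vstsRepo = new FactoryVSTSConfiguration(
    accountName: "contoso",
    repositoryName: "adf-pipelines",
    collaborationBranch: "main",
    rootFolder: "/",
    projectName: "DataPlatform",
    lastCommitId: null,
    tenantId: "00000000-0000-0000-0000-000000000000");

// Attach the repo configuration to the factory resource.
var factory = new Factory { RepoConfiguration = vstsRepo };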
@@ -0,0 +1,28 @@
using System.Collections.Generic;

namespace Microsoft.Azure.Management.DataFactory.Models
{
    public partial class ScriptActivity : ExecutionActivity
    {
        /// <summary>
        /// Initializes a new instance of the ScriptActivity class.
        /// </summary>
        /// <param name="name">Activity name.</param>
        /// <param name="additionalProperties">Unmatched properties from the
        /// message are deserialized to this collection.</param>
        /// <param name="description">Activity description.</param>
        /// <param name="dependsOn">Activity depends on condition.</param>
        /// <param name="userProperties">Activity user properties.</param>
        /// <param name="linkedServiceName">Linked service reference.</param>
        /// <param name="policy">Activity policy.</param>
        /// <param name="scripts">Array of script blocks. Type: array.</param>
        /// <param name="logSettings">Log settings of script activity.</param>
        public ScriptActivity(string name, IDictionary<string, object> additionalProperties, string description, IList<ActivityDependency> dependsOn, IList<UserProperty> userProperties, LinkedServiceReference linkedServiceName, ActivityPolicy policy, IList<ScriptActivityScriptBlock> scripts, ScriptActivityTypePropertiesLogSettings logSettings = default(ScriptActivityTypePropertiesLogSettings))
            : base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy)
        {
            Scripts = scripts;
            LogSettings = logSettings;
            CustomInit();
        }
    }
}
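
A sketch of calling the ScriptActivity overload above; the member names used on ScriptActivityScriptBlock (Text, Type) and ScriptActivityTypePropertiesLogSettings (LogDestination) are assumptions based on the parameter descriptions, and the SQL text and linked service name are placeholders.

using System.Collections.Generic;
using Microsoft.Azure.Management.DataFactory.Models;

var scriptActivity = new ScriptActivity(
    name: "RunCleanupScript",
    additionalProperties: null,
    description: "Deletes staging rows older than 30 days.",
    dependsOn: null,
    userProperties: null,
    linkedServiceName: new LinkedServiceReference { ReferenceName = "AzureSqlDatabaseLS" },
    policy: null,
    scripts: new List<ScriptActivityScriptBlock>
    {
        // Text/Type member names are assumed for this sketch.
        new ScriptActivityScriptBlock
        {
            Text = "DELETE FROM dbo.Staging WHERE LoadDate < DATEADD(day, -30, GETDATE())",
            Type = "NonQuery"
        }
    },
    logSettings: new ScriptActivityTypePropertiesLogSettings { LogDestination = "ActivityOutput" });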
@@ -0,0 +1,68 @@
using System.Collections.Generic;

namespace Microsoft.Azure.Management.DataFactory.Models
{
    public partial class SynapseSparkJobDefinitionActivity : ExecutionActivity
    {
        /// <summary>
        /// Initializes a new instance of the SynapseSparkJobDefinitionActivity
        /// class.
        /// </summary>
        /// <param name="name">Activity name.</param>
        /// <param name="sparkJob">Synapse spark job reference.</param>
        /// <param name="additionalProperties">Unmatched properties from the
        /// message are deserialized to this collection.</param>
        /// <param name="description">Activity description.</param>
        /// <param name="dependsOn">Activity depends on condition.</param>
        /// <param name="userProperties">Activity user properties.</param>
        /// <param name="linkedServiceName">Linked service reference.</param>
        /// <param name="policy">Activity policy.</param>
        /// <param name="arguments">User specified arguments to
        /// SynapseSparkJobDefinitionActivity.</param>
        /// <param name="file">The main file used for the job, which will
        /// override the 'file' of the spark job definition you provide. Type:
        /// string (or Expression with resultType string).</param>
        /// <param name="className">The fully-qualified identifier or the main
        /// class that is in the main definition file, which will override the
        /// 'className' of the spark job definition you provide. Type: string
        /// (or Expression with resultType string).</param>
        /// <param name="files">Additional files used for reference in the main
        /// definition file, which will override the 'files' of the spark job
        /// definition you provide.</param>
        /// <param name="targetBigDataPool">The name of the big data pool which
        /// will be used to execute the spark batch job, which will override
        /// the 'targetBigDataPool' of the spark job definition you
        /// provide.</param>
        /// <param name="executorSize">Number of core and memory to be used for
        /// executors allocated in the specified Spark pool for the job, which
        /// will be used for overriding 'executorCores' and 'executorMemory' of
        /// the spark job definition you provide. Type: string (or Expression
        /// with resultType string).</param>
        /// <param name="conf">Spark configuration properties, which will
        /// override the 'conf' of the spark job definition you
        /// provide.</param>
        /// <param name="driverSize">Number of core and memory to be used for
        /// driver allocated in the specified Spark pool for the job, which
        /// will be used for overriding 'driverCores' and 'driverMemory' of the
        /// spark job definition you provide. Type: string (or Expression with
        /// resultType string).</param>
        /// <param name="numExecutors">Number of executors to launch for this
        /// job, which will override the 'numExecutors' of the spark job
        /// definition you provide.</param>
        public SynapseSparkJobDefinitionActivity(string name, SynapseSparkJobReference sparkJob, IDictionary<string, object> additionalProperties, string description, IList<ActivityDependency> dependsOn, IList<UserProperty> userProperties, LinkedServiceReference linkedServiceName, ActivityPolicy policy, IList<object> arguments, object file, object className, IList<object> files, BigDataPoolParametrizationReference targetBigDataPool, object executorSize = default(object), object conf = default(object), object driverSize = default(object), int? numExecutors = default(int?))
            : base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy)
        {
            SparkJob = sparkJob;
            Arguments = arguments;
            File = file;
            ClassName = className;
            Files = files;
            TargetBigDataPool = targetBigDataPool;
            ExecutorSize = executorSize;
            Conf = conf;
            DriverSize = driverSize;
            NumExecutors = numExecutors;
            CustomInit();
        }
    }
}
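
And a sketch of the SynapseSparkJobDefinitionActivity overload above; the reference-model member names (ReferenceName on SynapseSparkJobReference and BigDataPoolParametrizationReference) are assumptions, and the file path, pool name, and sizes are placeholders.

using System.Collections.Generic;
using Microsoft.Azure.Management.DataFactory.Models;

var sparkJobActivity = new SynapseSparkJobDefinitionActivity(
    name: "RunSparkTransform",
    sparkJob: new SynapseSparkJobReference { ReferenceName = "DailyTransformJob" },
    additionalProperties: null,
    description: null,
    dependsOn: null,
    userProperties: null,
    linkedServiceName: new LinkedServiceReference { ReferenceName = "SynapseWorkspaceLS" },
    policy: null,
    arguments: new List<object> { "--date", "2022-12-01" },
    file: "abfss://jobs@contosodatalake.dfs.core.windows.net/main.py",
    className: null,
    files: null,
    targetBigDataPool: new BigDataPoolParametrizationReference { ReferenceName = "sparkpool01" },
    executorSize: "Medium",
    numExecutors: 2);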
