diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.netstandard2.0.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.netstandard2.0.cs index c92a2ec484981..b0167d6ac7be5 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.netstandard2.0.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/api/Azure.ResourceManager.DataFactory.netstandard2.0.cs @@ -1278,7 +1278,7 @@ public partial class AzureFunctionActivity : Azure.ResourceManager.DataFactory.M public AzureFunctionActivity(string name, Azure.ResourceManager.DataFactory.Models.AzureFunctionActivityMethod method, Azure.Core.Expressions.DataFactory.DataFactoryElement functionName) : base (default(string)) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement Body { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement FunctionName { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement Headers { get { throw null; } set { } } + public System.Collections.Generic.IDictionary> Headers { get { throw null; } } public Azure.ResourceManager.DataFactory.Models.AzureFunctionActivityMethod Method { get { throw null; } set { } } } [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] @@ -1306,13 +1306,13 @@ public AzureFunctionActivity(string name, Azure.ResourceManager.DataFactory.Mode } public partial class AzureFunctionLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties { - public AzureFunctionLinkedService(System.BinaryData functionAppUri) { } + public AzureFunctionLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement functionAppUri) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement Authentication { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.DataFactoryCredentialReference Credential { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } - public System.BinaryData FunctionAppUri { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement FunctionAppUri { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactorySecretBaseDefinition FunctionKey { get { throw null; } set { } } - public System.BinaryData ResourceId { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ResourceId { get { throw null; } set { } } } public partial class AzureKeyVaultLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties { @@ -1348,7 +1348,7 @@ public partial class AzureMLExecutePipelineActivity : Azure.ResourceManager.Data { public AzureMLExecutePipelineActivity(string name) : base (default(string)) { } public Azure.Core.Expressions.DataFactory.DataFactoryElement ContinueOnStepFailure { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement> DataPathAssignments { get { throw null; } set { } } + public System.BinaryData DataPathAssignments { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ExperimentName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement MLParentRunId { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement MLPipelineEndpointId { get { throw 
null; } set { } } @@ -1548,7 +1548,7 @@ public partial class AzureSqlSource : Azure.ResourceManager.DataFactory.Models.T { public AzureSqlSource() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement IsolationLevel { get { throw null; } set { } } - public System.BinaryData PartitionOption { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement PartitionOption { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.SqlPartitionSettings PartitionSettings { get { throw null; } set { } } public System.BinaryData ProduceAdditionalTypes { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SqlReaderQuery { get { throw null; } set { } } @@ -1729,10 +1729,10 @@ public ChainingTrigger(Azure.ResourceManager.DataFactory.Models.TriggerPipelineR } public partial class CmdkeySetup : Azure.ResourceManager.DataFactory.Models.CustomSetupBase { - public CmdkeySetup(System.BinaryData targetName, System.BinaryData userName, Azure.Core.Expressions.DataFactory.DataFactorySecretBaseDefinition password) { } + public CmdkeySetup(Azure.Core.Expressions.DataFactory.DataFactoryElement targetName, Azure.Core.Expressions.DataFactory.DataFactoryElement userName, Azure.Core.Expressions.DataFactory.DataFactorySecretBaseDefinition password) { } public Azure.Core.Expressions.DataFactory.DataFactorySecretBaseDefinition Password { get { throw null; } set { } } - public System.BinaryData TargetName { get { throw null; } set { } } - public System.BinaryData UserName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement TargetName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement UserName { get { throw null; } set { } } } public partial class CommonDataServiceForAppsEntityDataset : Azure.ResourceManager.DataFactory.Models.DataFactoryDatasetProperties { @@ -3878,12 +3878,12 @@ public HDInsightMapReduceActivity(string name, Azure.Core.Expressions.DataFactor } public partial class HDInsightOnDemandLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties { - public HDInsightOnDemandLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement clusterSize, Azure.Core.Expressions.DataFactory.DataFactoryElement timeToLiveExpression, Azure.Core.Expressions.DataFactory.DataFactoryElement version, Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference linkedServiceName, Azure.Core.Expressions.DataFactory.DataFactoryElement hostSubscriptionId, Azure.Core.Expressions.DataFactory.DataFactoryElement tenant, Azure.Core.Expressions.DataFactory.DataFactoryElement clusterResourceGroup) { } + public HDInsightOnDemandLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement clusterSize, Azure.Core.Expressions.DataFactory.DataFactoryElement timeToLiveExpression, Azure.Core.Expressions.DataFactory.DataFactoryElement version, Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference linkedServiceName, Azure.Core.Expressions.DataFactory.DataFactoryElement hostSubscriptionId, Azure.Core.Expressions.DataFactory.DataFactoryElement tenant, Azure.Core.Expressions.DataFactory.DataFactoryElement clusterResourceGroup) { } public System.Collections.Generic.IList AdditionalLinkedServiceNames { get { throw null; } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ClusterNamePrefix { get { throw null; } set { } } public 
Azure.Core.Expressions.DataFactory.DataFactorySecretBaseDefinition ClusterPassword { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ClusterResourceGroup { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement ClusterSize { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ClusterSize { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactorySecretBaseDefinition ClusterSshPassword { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ClusterSshUserName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ClusterType { get { throw null; } set { } } @@ -3917,7 +3917,7 @@ public HDInsightOnDemandLinkedService(Azure.Core.Expressions.DataFactory.DataFac public partial class HDInsightPigActivity : Azure.ResourceManager.DataFactory.Models.ExecutionActivity { public HDInsightPigActivity(string name) : base (default(string)) { } - public System.BinaryData Arguments { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement> Arguments { get { throw null; } set { } } public System.Collections.Generic.IDictionary Defines { get { throw null; } } public Azure.ResourceManager.DataFactory.Models.HDInsightActivityDebugInfoOptionSetting? GetDebugInfo { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference ScriptLinkedService { get { throw null; } set { } } @@ -4068,7 +4068,7 @@ public partial class HttpLinkedService : Azure.ResourceManager.DataFactory.Model { public HttpLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement uri) { } public Azure.ResourceManager.DataFactory.Models.HttpAuthenticationType? 
AuthenticationType { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement AuthHeaders { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement> AuthHeaders { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement CertThumbprint { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement EmbeddedCertData { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement EnableServerCertificateValidation { get { throw null; } set { } } @@ -4937,8 +4937,13 @@ public partial class MariaDBLinkedService : Azure.ResourceManager.DataFactory.Mo { public MariaDBLinkedService() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Database { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement DriverVersion { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryKeyVaultSecretReference Password { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Port { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Username { get { throw null; } set { } } } public partial class MariaDBSource : Azure.ResourceManager.DataFactory.Models.TabularSource { @@ -5107,10 +5112,17 @@ public MultiplePipelineTrigger() { } } public partial class MySqlLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties { - public MySqlLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement connectionString) { } + public MySqlLinkedService() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Database { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement DriverVersion { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryKeyVaultSecretReference Password { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Port { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Server { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SslMode { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement Username { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement UseSystemTrustStore { get { throw null; } set { } } } public partial class MySqlSource : Azure.ResourceManager.DataFactory.Models.TabularSource { @@ -5238,7 +5250,7 @@ public ODataLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement< public Azure.Core.Expressions.DataFactory.DataFactoryElement AadResourceId { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.ODataAadServicePrincipalCredentialType? AadServicePrincipalCredentialType { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.ODataAuthenticationType? 
AuthenticationType { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement AuthHeaders { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement> AuthHeaders { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement AzureCloudType { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactorySecretBaseDefinition Password { get { throw null; } set { } } @@ -5893,8 +5905,8 @@ public RestServiceLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryEl public partial class RestSink : Azure.ResourceManager.DataFactory.Models.CopySink { public RestSink() { } - public Azure.Core.Expressions.DataFactory.DataFactoryElement AdditionalHeaders { get { throw null; } set { } } - public System.BinaryData HttpCompressionType { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement> AdditionalHeaders { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement HttpCompressionType { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement HttpRequestTimeout { get { throw null; } set { } } public System.BinaryData RequestInterval { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement RequestMethod { get { throw null; } set { } } @@ -5902,7 +5914,7 @@ public RestSink() { } public partial class RestSource : Azure.ResourceManager.DataFactory.Models.CopyActivitySource { public RestSource() { } - public System.BinaryData AdditionalColumns { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement> AdditionalColumns { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement AdditionalHeaders { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement HttpRequestTimeout { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement PaginationRules { get { throw null; } set { } } @@ -6095,6 +6107,35 @@ public SalesforceServiceCloudSource() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement Query { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ReadBehavior { get { throw null; } set { } } } + public partial class SalesforceServiceCloudV2LinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties + { + public SalesforceServiceCloudV2LinkedService() { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ApiVersion { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ClientId { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactorySecretBaseDefinition ClientSecret { get { throw null; } set { } } + public string EncryptedCredential { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EnvironmentUri { get { throw null; } set { } } + } + public partial class SalesforceServiceCloudV2ObjectDataset : Azure.ResourceManager.DataFactory.Models.DataFactoryDatasetProperties + { + public SalesforceServiceCloudV2ObjectDataset(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference linkedServiceName) : base (default(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference)) { } + public 
Azure.Core.Expressions.DataFactory.DataFactoryElement ObjectApiName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ReportId { get { throw null; } set { } } + } + public partial class SalesforceServiceCloudV2Sink : Azure.ResourceManager.DataFactory.Models.CopySink + { + public SalesforceServiceCloudV2Sink() { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ExternalIdFieldName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement IgnoreNullValues { get { throw null; } set { } } + public Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior? WriteBehavior { get { throw null; } set { } } + } + public partial class SalesforceServiceCloudV2Source : Azure.ResourceManager.DataFactory.Models.CopyActivitySource + { + public SalesforceServiceCloudV2Source() { } + public System.BinaryData AdditionalColumns { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ReadBehavior { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SoqlQuery { get { throw null; } set { } } + } public partial class SalesforceSink : Azure.ResourceManager.DataFactory.Models.CopySink { public SalesforceSink() { } @@ -6126,6 +6167,52 @@ public SalesforceSource() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement Query { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ReadBehavior { get { throw null; } set { } } } + public partial class SalesforceV2LinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties + { + public SalesforceV2LinkedService() { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ApiVersion { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ClientId { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactorySecretBaseDefinition ClientSecret { get { throw null; } set { } } + public string EncryptedCredential { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement EnvironmentUri { get { throw null; } set { } } + } + public partial class SalesforceV2ObjectDataset : Azure.ResourceManager.DataFactory.Models.DataFactoryDatasetProperties + { + public SalesforceV2ObjectDataset(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference linkedServiceName) : base (default(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference)) { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ObjectApiName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ReportId { get { throw null; } set { } } + } + public partial class SalesforceV2Sink : Azure.ResourceManager.DataFactory.Models.CopySink + { + public SalesforceV2Sink() { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ExternalIdFieldName { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement IgnoreNullValues { get { throw null; } set { } } + public Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior? 
WriteBehavior { get { throw null; } set { } } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct SalesforceV2SinkWriteBehavior : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public SalesforceV2SinkWriteBehavior(string value) { throw null; } + public static Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior Insert { get { throw null; } } + public static Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior Upsert { get { throw null; } } + public bool Equals(Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior left, Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior right) { throw null; } + public static implicit operator Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior (string value) { throw null; } + public static bool operator !=(Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior left, Azure.ResourceManager.DataFactory.Models.SalesforceV2SinkWriteBehavior right) { throw null; } + public override string ToString() { throw null; } + } + public partial class SalesforceV2Source : Azure.ResourceManager.DataFactory.Models.TabularSource + { + public SalesforceV2Source() { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ReadBehavior { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement SoqlQuery { get { throw null; } set { } } + } public partial class SapBWCubeDataset : Azure.ResourceManager.DataFactory.Models.DataFactoryDatasetProperties { public SapBWCubeDataset(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference linkedServiceName) : base (default(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceReference)) { } @@ -6731,8 +6818,8 @@ public SnowflakeImportCopyCommand() { } } public partial class SnowflakeLinkedService : Azure.ResourceManager.DataFactory.Models.DataFactoryLinkedServiceProperties { - public SnowflakeLinkedService(System.BinaryData connectionString) { } - public System.BinaryData ConnectionString { get { throw null; } set { } } + public SnowflakeLinkedService(Azure.Core.Expressions.DataFactory.DataFactoryElement connectionString) { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ConnectionString { get { throw null; } set { } } public string EncryptedCredential { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryKeyVaultSecretReference Password { get { throw null; } set { } } } @@ -6920,7 +7007,7 @@ public partial class SqlDWSource : Azure.ResourceManager.DataFactory.Models.Tabu { public SqlDWSource() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement IsolationLevel { get { throw null; } set { } } - public System.BinaryData PartitionOption { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement PartitionOption { get { throw null; } set { } } public 
Azure.ResourceManager.DataFactory.Models.SqlPartitionSettings PartitionSettings { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SqlReaderQuery { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SqlReaderStoredProcedureName { get { throw null; } set { } } @@ -6943,13 +7030,13 @@ public SqlMISink() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement StoredProcedureTableTypeParameterName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement TableOption { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.SqlUpsertSettings UpsertSettings { get { throw null; } set { } } - public System.BinaryData WriteBehavior { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement WriteBehavior { get { throw null; } set { } } } public partial class SqlMISource : Azure.ResourceManager.DataFactory.Models.TabularSource { public SqlMISource() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement IsolationLevel { get { throw null; } set { } } - public System.BinaryData PartitionOption { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement PartitionOption { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.SqlPartitionSettings PartitionSettings { get { throw null; } set { } } public System.BinaryData ProduceAdditionalTypes { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SqlReaderQuery { get { throw null; } set { } } @@ -6983,13 +7070,13 @@ public SqlServerSink() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement StoredProcedureTableTypeParameterName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement TableOption { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.SqlUpsertSettings UpsertSettings { get { throw null; } set { } } - public System.BinaryData WriteBehavior { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement WriteBehavior { get { throw null; } set { } } } public partial class SqlServerSource : Azure.ResourceManager.DataFactory.Models.TabularSource { public SqlServerSource() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement IsolationLevel { get { throw null; } set { } } - public System.BinaryData PartitionOption { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement PartitionOption { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.SqlPartitionSettings PartitionSettings { get { throw null; } set { } } public System.BinaryData ProduceAdditionalTypes { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SqlReaderQuery { get { throw null; } set { } } @@ -7020,13 +7107,13 @@ public SqlSink() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement StoredProcedureTableTypeParameterName { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement TableOption { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.SqlUpsertSettings UpsertSettings { get { throw null; } set { } } - public System.BinaryData WriteBehavior { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement WriteBehavior { get { throw null; } set { } } } public partial class 
SqlSource : Azure.ResourceManager.DataFactory.Models.TabularSource { public SqlSource() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement IsolationLevel { get { throw null; } set { } } - public System.BinaryData PartitionOption { get { throw null; } set { } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement PartitionOption { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.SqlPartitionSettings PartitionSettings { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SqlReaderQuery { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement SqlReaderStoredProcedureName { get { throw null; } set { } } @@ -7252,6 +7339,7 @@ public StoreWriteSettings() { } public Azure.Core.Expressions.DataFactory.DataFactoryElement CopyBehavior { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement DisableMetricsCollection { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement MaxConcurrentConnections { get { throw null; } set { } } + public System.Collections.Generic.IList Metadata { get { throw null; } } } public partial class SwitchActivity : Azure.ResourceManager.DataFactory.Models.ControlActivity { @@ -7347,8 +7435,8 @@ public SynapseSparkJobDefinitionActivity(string name, Azure.ResourceManager.Data } public partial class SynapseSparkJobReference { - public SynapseSparkJobReference(Azure.ResourceManager.DataFactory.Models.SparkJobReferenceType sparkJobReferenceType, System.BinaryData referenceName) { } - public System.BinaryData ReferenceName { get { throw null; } set { } } + public SynapseSparkJobReference(Azure.ResourceManager.DataFactory.Models.SparkJobReferenceType sparkJobReferenceType, Azure.Core.Expressions.DataFactory.DataFactoryElement referenceName) { } + public Azure.Core.Expressions.DataFactory.DataFactoryElement ReferenceName { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.SparkJobReferenceType SparkJobReferenceType { get { throw null; } set { } } } public partial class TabularSource : Azure.ResourceManager.DataFactory.Models.CopyActivitySource @@ -7557,9 +7645,11 @@ public WebActivity(string name, Azure.ResourceManager.DataFactory.Models.WebActi public Azure.ResourceManager.DataFactory.Models.IntegrationRuntimeReference ConnectVia { get { throw null; } set { } } public System.Collections.Generic.IList Datasets { get { throw null; } } public bool? DisableCertValidation { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement Headers { get { throw null; } set { } } + public System.Collections.Generic.IDictionary> Headers { get { throw null; } } + public Azure.Core.Expressions.DataFactory.DataFactoryElement HttpRequestTimeout { get { throw null; } set { } } public System.Collections.Generic.IList LinkedServices { get { throw null; } } public Azure.ResourceManager.DataFactory.Models.WebActivityMethod Method { get { throw null; } set { } } + public bool? TurnOffAsync { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Uri { get { throw null; } set { } } } public partial class WebActivityAuthentication @@ -7614,7 +7704,7 @@ public partial class WebHookActivity : Azure.ResourceManager.DataFactory.Models. 
public WebHookActivity(string name, Azure.ResourceManager.DataFactory.Models.WebHookActivityMethod method, Azure.Core.Expressions.DataFactory.DataFactoryElement uri) : base (default(string)) { } public Azure.ResourceManager.DataFactory.Models.WebActivityAuthentication Authentication { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement Body { get { throw null; } set { } } - public Azure.Core.Expressions.DataFactory.DataFactoryElement Headers { get { throw null; } set { } } + public System.Collections.Generic.IDictionary> Headers { get { throw null; } } public Azure.ResourceManager.DataFactory.Models.WebHookActivityMethod Method { get { throw null; } set { } } public Azure.ResourceManager.DataFactory.Models.SecureInputOutputPolicy Policy { get { throw null; } set { } } public Azure.Core.Expressions.DataFactory.DataFactoryElement ReportStatusOnCallBack { get { throw null; } set { } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/DataFactoryDatasetData.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/DataFactoryDatasetData.cs index 6d4451f2f1a9c..07c06b9d7c137 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/DataFactoryDatasetData.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/DataFactoryDatasetData.cs @@ -23,7 +23,7 @@ public partial class DataFactoryDatasetData : ResourceData /// /// Dataset properties. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// is null. public DataFactoryDatasetData(DataFactoryDatasetProperties properties) @@ -41,7 +41,7 @@ public DataFactoryDatasetData(DataFactoryDatasetProperties properties) /// /// Dataset properties. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// Etag identifies change in the resource. internal DataFactoryDatasetData(ResourceIdentifier id, string name, ResourceType resourceType, SystemData systemData, DataFactoryDatasetProperties properties, ETag? eTag) : base(id, name, resourceType, systemData) @@ -53,7 +53,7 @@ internal DataFactoryDatasetData(ResourceIdentifier id, string name, ResourceType /// /// Dataset properties. /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// public DataFactoryDatasetProperties Properties { get; set; } /// Etag identifies change in the resource. diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/DataFactoryLinkedServiceData.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/DataFactoryLinkedServiceData.cs index 1f1346b2d0239..2120dff9750e4 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/DataFactoryLinkedServiceData.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/DataFactoryLinkedServiceData.cs @@ -23,7 +23,7 @@ public partial class DataFactoryLinkedServiceData : ResourceData /// /// Properties of linked service. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// is null. public DataFactoryLinkedServiceData(DataFactoryLinkedServiceProperties properties) @@ -41,7 +41,7 @@ public DataFactoryLinkedServiceData(DataFactoryLinkedServiceProperties propertie /// /// Properties of linked service. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// Etag identifies change in the resource. internal DataFactoryLinkedServiceData(ResourceIdentifier id, string name, ResourceType resourceType, SystemData systemData, DataFactoryLinkedServiceProperties properties, ETag? eTag) : base(id, name, resourceType, systemData) @@ -53,7 +53,7 @@ internal DataFactoryLinkedServiceData(ResourceIdentifier id, string name, Resour /// /// Properties of linked service. /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// public DataFactoryLinkedServiceProperties Properties { get; set; } /// Etag identifies change in the resource. diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobFSWriteSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobFSWriteSettings.Serialization.cs index 8c907b97248f7..eb111b007dd2f 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobFSWriteSettings.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobFSWriteSettings.Serialization.cs @@ -40,6 +40,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("copyBehavior"u8); JsonSerializer.Serialize(writer, CopyBehavior); } + if (Optional.IsCollectionDefined(Metadata)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartArray(); + foreach (var item in Metadata) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -66,6 +76,7 @@ internal static AzureBlobFSWriteSettings DeserializeAzureBlobFSWriteSettings(Jso Optional> maxConcurrentConnections = default; Optional> disableMetricsCollection = default; Optional> copyBehavior = default; + Optional> metadata = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -111,10 +122,24 @@ internal static AzureBlobFSWriteSettings DeserializeAzureBlobFSWriteSettings(Jso copyBehavior = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("metadata"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(DataFactoryMetadataItemInfo.DeserializeDataFactoryMetadataItemInfo(item)); + } + metadata = array; + continue; + } additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new AzureBlobFSWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, additionalProperties, blockSizeInMB.Value); + return new AzureBlobFSWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, Optional.ToList(metadata), additionalProperties, blockSizeInMB.Value); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobFSWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobFSWriteSettings.cs index b261d29d953ee..bcc253dcff5ad 
100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobFSWriteSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobFSWriteSettings.cs @@ -25,9 +25,10 @@ public AzureBlobFSWriteSettings() /// The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// The type of copy behavior for copy sink. + /// Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). /// Additional Properties. /// Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - internal AzureBlobFSWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IDictionary additionalProperties, DataFactoryElement blockSizeInMB) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, additionalProperties) + internal AzureBlobFSWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IList metadata, IDictionary additionalProperties, DataFactoryElement blockSizeInMB) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, metadata, additionalProperties) { BlockSizeInMB = blockSizeInMB; StoreWriteSettingsType = storeWriteSettingsType ?? "AzureBlobFSWriteSettings"; diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobStorageWriteSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobStorageWriteSettings.Serialization.cs index a6ab2040f0bf4..86a4fef07f9d6 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobStorageWriteSettings.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobStorageWriteSettings.Serialization.cs @@ -40,6 +40,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("copyBehavior"u8); JsonSerializer.Serialize(writer, CopyBehavior); } + if (Optional.IsCollectionDefined(Metadata)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartArray(); + foreach (var item in Metadata) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -66,6 +76,7 @@ internal static AzureBlobStorageWriteSettings DeserializeAzureBlobStorageWriteSe Optional> maxConcurrentConnections = default; Optional> disableMetricsCollection = default; Optional> copyBehavior = default; + Optional> metadata = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -111,10 +122,24 @@ internal static AzureBlobStorageWriteSettings DeserializeAzureBlobStorageWriteSe copyBehavior = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("metadata"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + 
{ + array.Add(DataFactoryMetadataItemInfo.DeserializeDataFactoryMetadataItemInfo(item)); + } + metadata = array; + continue; + } additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new AzureBlobStorageWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, additionalProperties, blockSizeInMB.Value); + return new AzureBlobStorageWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, Optional.ToList(metadata), additionalProperties, blockSizeInMB.Value); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobStorageWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobStorageWriteSettings.cs index ac9b587a5b3c2..a7b937d7d0b3e 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobStorageWriteSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureBlobStorageWriteSettings.cs @@ -25,9 +25,10 @@ public AzureBlobStorageWriteSettings() /// The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// The type of copy behavior for copy sink. + /// Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). /// Additional Properties. /// Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). - internal AzureBlobStorageWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IDictionary additionalProperties, DataFactoryElement blockSizeInMB) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, additionalProperties) + internal AzureBlobStorageWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IList metadata, IDictionary additionalProperties, DataFactoryElement blockSizeInMB) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, metadata, additionalProperties) { BlockSizeInMB = blockSizeInMB; StoreWriteSettingsType = storeWriteSettingsType ?? 
"AzureBlobStorageWriteSettings"; diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureDataLakeStoreWriteSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureDataLakeStoreWriteSettings.Serialization.cs index b7437cf8c80d8..85fdb2da426e6 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureDataLakeStoreWriteSettings.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureDataLakeStoreWriteSettings.Serialization.cs @@ -40,6 +40,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("copyBehavior"u8); JsonSerializer.Serialize(writer, CopyBehavior); } + if (Optional.IsCollectionDefined(Metadata)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartArray(); + foreach (var item in Metadata) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -66,6 +76,7 @@ internal static AzureDataLakeStoreWriteSettings DeserializeAzureDataLakeStoreWri Optional> maxConcurrentConnections = default; Optional> disableMetricsCollection = default; Optional> copyBehavior = default; + Optional> metadata = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -111,10 +122,24 @@ internal static AzureDataLakeStoreWriteSettings DeserializeAzureDataLakeStoreWri copyBehavior = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("metadata"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(DataFactoryMetadataItemInfo.DeserializeDataFactoryMetadataItemInfo(item)); + } + metadata = array; + continue; + } additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new AzureDataLakeStoreWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, additionalProperties, expiryDateTime.Value); + return new AzureDataLakeStoreWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, Optional.ToList(metadata), additionalProperties, expiryDateTime.Value); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureDataLakeStoreWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureDataLakeStoreWriteSettings.cs index 305fda3919b28..09bfd98d048af 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureDataLakeStoreWriteSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureDataLakeStoreWriteSettings.cs @@ -25,9 +25,10 @@ public AzureDataLakeStoreWriteSettings() /// The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// The type of copy behavior for copy sink. + /// Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). /// Additional Properties. 
/// Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or Expression with resultType string). - internal AzureDataLakeStoreWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IDictionary additionalProperties, DataFactoryElement expiryDateTime) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, additionalProperties) + internal AzureDataLakeStoreWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IList metadata, IDictionary additionalProperties, DataFactoryElement expiryDateTime) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, metadata, additionalProperties) { ExpiryDateTime = expiryDateTime; StoreWriteSettingsType = storeWriteSettingsType ?? "AzureDataLakeStoreWriteSettings"; diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.Serialization.cs index 8f82d98a310d2..ff792dac1bfb2 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.Serialization.cs @@ -35,6 +35,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("copyBehavior"u8); JsonSerializer.Serialize(writer, CopyBehavior); } + if (Optional.IsCollectionDefined(Metadata)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartArray(); + foreach (var item in Metadata) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -60,6 +70,7 @@ internal static AzureFileStorageWriteSettings DeserializeAzureFileStorageWriteSe Optional> maxConcurrentConnections = default; Optional> disableMetricsCollection = default; Optional> copyBehavior = default; + Optional> metadata = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -96,10 +107,24 @@ internal static AzureFileStorageWriteSettings DeserializeAzureFileStorageWriteSe copyBehavior = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("metadata"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(DataFactoryMetadataItemInfo.DeserializeDataFactoryMetadataItemInfo(item)); + } + metadata = array; + continue; + } additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new AzureFileStorageWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, additionalProperties); + return new AzureFileStorageWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, Optional.ToList(metadata), 
additionalProperties); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.cs index 94e77bd9159c9..fc4da910952a4 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFileStorageWriteSettings.cs @@ -25,8 +25,9 @@ public AzureFileStorageWriteSettings() /// The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// The type of copy behavior for copy sink. + /// Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). /// Additional Properties. - internal AzureFileStorageWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IDictionary additionalProperties) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, additionalProperties) + internal AzureFileStorageWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IList metadata, IDictionary additionalProperties) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, metadata, additionalProperties) { StoreWriteSettingsType = storeWriteSettingsType ?? 
"AzureFileStorageWriteSettings"; } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionActivity.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionActivity.Serialization.cs index edafde6e9fba9..4f1a2a9a5de32 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionActivity.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionActivity.Serialization.cs @@ -73,10 +73,21 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WriteStringValue(Method.ToString()); writer.WritePropertyName("functionName"u8); JsonSerializer.Serialize(writer, FunctionName); - if (Optional.IsDefined(Headers)) + if (Optional.IsCollectionDefined(Headers)) { writer.WritePropertyName("headers"u8); - JsonSerializer.Serialize(writer, Headers); + writer.WriteStartObject(); + foreach (var item in Headers) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + JsonSerializer.Serialize(writer, item.Value); + } + writer.WriteEndObject(); } if (Optional.IsDefined(Body)) { @@ -116,7 +127,7 @@ internal static AzureFunctionActivity DeserializeAzureFunctionActivity(JsonEleme Optional> userProperties = default; AzureFunctionActivityMethod method = default; DataFactoryElement functionName = default; - Optional> headers = default; + Optional>> headers = default; Optional> body = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); @@ -226,7 +237,19 @@ internal static AzureFunctionActivity DeserializeAzureFunctionActivity(JsonEleme { continue; } - headers = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + Dictionary> dictionary = new Dictionary>(); + foreach (var property1 in property0.Value.EnumerateObject()) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(property1.Name, null); + } + else + { + dictionary.Add(property1.Name, JsonSerializer.Deserialize>(property1.Value.GetRawText())); + } + } + headers = dictionary; continue; } if (property0.NameEquals("body"u8)) @@ -244,7 +267,7 @@ internal static AzureFunctionActivity DeserializeAzureFunctionActivity(JsonEleme additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new AzureFunctionActivity(name, type, description.Value, Optional.ToNullable(state), Optional.ToNullable(onInactiveMarkAs), Optional.ToList(dependsOn), Optional.ToList(userProperties), additionalProperties, linkedServiceName, policy.Value, method, functionName, headers.Value, body.Value); + return new AzureFunctionActivity(name, type, description.Value, Optional.ToNullable(state), Optional.ToNullable(onInactiveMarkAs), Optional.ToList(dependsOn), Optional.ToList(userProperties), additionalProperties, linkedServiceName, policy.Value, method, functionName, Optional.ToDictionary(headers), body.Value); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionActivity.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionActivity.cs index 11bd664d06ae9..939ad437715a1 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionActivity.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionActivity.cs 
@@ -27,6 +27,7 @@ public AzureFunctionActivity(string name, AzureFunctionActivityMethod method, Da Method = method; FunctionName = functionName; + Headers = new ChangeTrackingDictionary>(); ActivityType = "AzureFunctionActivity"; } @@ -45,7 +46,7 @@ public AzureFunctionActivity(string name, AzureFunctionActivityMethod method, Da /// Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string). /// Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). /// Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). - internal AzureFunctionActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, AzureFunctionActivityMethod method, DataFactoryElement functionName, DataFactoryElement headers, DataFactoryElement body) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) + internal AzureFunctionActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, AzureFunctionActivityMethod method, DataFactoryElement functionName, IDictionary> headers, DataFactoryElement body) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) { Method = method; FunctionName = functionName; @@ -59,7 +60,7 @@ internal AzureFunctionActivity(string name, string activityType, string descript /// Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string). public DataFactoryElement FunctionName { get; set; } /// Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - public DataFactoryElement Headers { get; set; } + public IDictionary> Headers { get; } /// Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). 
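On the model side, Headers loses its setter and is pre-initialized to a ChangeTrackingDictionary, so callers mutate it in place. A hedged usage sketch (activity name, function name, and header values are placeholders):

```csharp
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var activity = new AzureFunctionActivity(
    "CallFunction",
    AzureFunctionActivityMethod.Post,
    DataFactoryElement<string>.FromLiteral("HttpTrigger1"));

// No setter anymore: add entries rather than assigning a new dictionary.
activity.Headers.Add("Content-Type", DataFactoryElement<string>.FromLiteral("application/json"));
activity.Headers.Add("Accept-Language", DataFactoryElement<string>.FromExpression("@pipeline().parameters.locale"));
```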
public DataFactoryElement Body { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionLinkedService.Serialization.cs index 8814a5f157edb..0ebcc54610bd0 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionLinkedService.Serialization.cs @@ -66,14 +66,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("typeProperties"u8); writer.WriteStartObject(); writer.WritePropertyName("functionAppUrl"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(FunctionAppUri); -#else - using (JsonDocument document = JsonDocument.Parse(FunctionAppUri)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, FunctionAppUri); if (Optional.IsDefined(FunctionKey)) { writer.WritePropertyName("functionKey"u8); @@ -92,14 +85,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(ResourceId)) { writer.WritePropertyName("resourceId"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(ResourceId); -#else - using (JsonDocument document = JsonDocument.Parse(ResourceId)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, ResourceId); } if (Optional.IsDefined(Authentication)) { @@ -133,11 +119,11 @@ internal static AzureFunctionLinkedService DeserializeAzureFunctionLinkedService Optional description = default; Optional> parameters = default; Optional> annotations = default; - BinaryData functionAppUrl = default; + DataFactoryElement functionAppUrl = default; Optional functionKey = default; Optional encryptedCredential = default; Optional credential = default; - Optional resourceId = default; + Optional> resourceId = default; Optional> authentication = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); @@ -208,7 +194,7 @@ internal static AzureFunctionLinkedService DeserializeAzureFunctionLinkedService { if (property0.NameEquals("functionAppUrl"u8)) { - functionAppUrl = BinaryData.FromString(property0.Value.GetRawText()); + functionAppUrl = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("functionKey"u8)) @@ -240,7 +226,7 @@ internal static AzureFunctionLinkedService DeserializeAzureFunctionLinkedService { continue; } - resourceId = BinaryData.FromString(property0.Value.GetRawText()); + resourceId = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("authentication"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionLinkedService.cs index 255d33f012e32..c7c55875f7f40 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureFunctionLinkedService.cs @@ -16,9 +16,9 @@ namespace Azure.ResourceManager.DataFactory.Models public partial class AzureFunctionLinkedService : DataFactoryLinkedServiceProperties { /// Initializes a new instance of . - /// The endpoint of the Azure Function App. 
URL will be in the format https://<accountName>.azurewebsites.net. + /// The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. Type: string (or Expression with resultType string). /// is null. - public AzureFunctionLinkedService(BinaryData functionAppUri) + public AzureFunctionLinkedService(DataFactoryElement functionAppUri) { Argument.AssertNotNull(functionAppUri, nameof(functionAppUri)); @@ -33,13 +33,13 @@ public AzureFunctionLinkedService(BinaryData functionAppUri) /// Parameters for linked service. /// List of tags that can be used for describing the linked service. /// Additional Properties. - /// The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. + /// The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. Type: string (or Expression with resultType string). /// Function or Host key for Azure Function App. /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. /// The credential reference containing authentication information. - /// Allowed token audiences for azure function. + /// Allowed token audiences for azure function. Type: string (or Expression with resultType string). /// Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression with resultType string). - internal AzureFunctionLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, BinaryData functionAppUri, DataFactorySecretBaseDefinition functionKey, string encryptedCredential, DataFactoryCredentialReference credential, BinaryData resourceId, DataFactoryElement authentication) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) + internal AzureFunctionLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement functionAppUri, DataFactorySecretBaseDefinition functionKey, string encryptedCredential, DataFactoryCredentialReference credential, DataFactoryElement resourceId, DataFactoryElement authentication) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) { FunctionAppUri = functionAppUri; FunctionKey = functionKey; @@ -50,74 +50,16 @@ internal AzureFunctionLinkedService(string linkedServiceType, IntegrationRuntime LinkedServiceType = linkedServiceType ?? "AzureFunction"; } - /// - /// The endpoint of the Azure Function App. URL will be in the format https://<accountName>.azurewebsites.net. - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData FunctionAppUri { get; set; } + /// The endpoint of the Azure Function App. 
URL will be in the format https://<accountName>.azurewebsites.net. Type: string (or Expression with resultType string). + public DataFactoryElement FunctionAppUri { get; set; } /// Function or Host key for Azure Function App. public DataFactorySecretBaseDefinition FunctionKey { get; set; } /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. public string EncryptedCredential { get; set; } /// The credential reference containing authentication information. public DataFactoryCredentialReference Credential { get; set; } - /// - /// Allowed token audiences for azure function. - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData ResourceId { get; set; } + /// Allowed token audiences for azure function. Type: string (or Expression with resultType string). + public DataFactoryElement ResourceId { get; set; } /// Type of authentication (Required to specify MSI) used to connect to AzureFunction. Type: string (or Expression with resultType string). public DataFactoryElement Authentication { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureMLExecutePipelineActivity.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureMLExecutePipelineActivity.Serialization.cs index 71abaa7da6d20..0504decde5b78 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureMLExecutePipelineActivity.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureMLExecutePipelineActivity.Serialization.cs @@ -97,7 +97,14 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(DataPathAssignments)) { writer.WritePropertyName("dataPathAssignments"u8); - JsonSerializer.Serialize(writer, DataPathAssignments); +#if NET6_0_OR_GREATER + writer.WriteRawValue(DataPathAssignments); +#else + using (JsonDocument document = JsonDocument.Parse(DataPathAssignments)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif } if (Optional.IsDefined(MLParentRunId)) { @@ -145,7 +152,7 @@ internal static AzureMLExecutePipelineActivity DeserializeAzureMLExecutePipeline Optional> version = default; Optional> experimentName = default; Optional>> mlPipelineParameters = default; - Optional>> dataPathAssignments = default; + Optional dataPathAssignments = default; Optional> mlParentRunId = default; Optional> continueOnStepFailure = default; IDictionary additionalProperties = default; @@ -291,7 +298,7 @@ internal static AzureMLExecutePipelineActivity DeserializeAzureMLExecutePipeline { continue; } - dataPathAssignments = JsonSerializer.Deserialize>>(property0.Value.GetRawText()); + dataPathAssignments = BinaryData.FromString(property0.Value.GetRawText()); continue; } if (property0.NameEquals("mlParentRunId"u8)) diff --git 
a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureMLExecutePipelineActivity.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureMLExecutePipelineActivity.cs index 039b4f6767667..4c5b00186a602 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureMLExecutePipelineActivity.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureMLExecutePipelineActivity.cs @@ -41,10 +41,10 @@ public AzureMLExecutePipelineActivity(string name) : base(name) /// Version of the published Azure ML pipeline endpoint. Type: string (or Expression with resultType string). /// Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). /// Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - /// Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). + /// Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object (or Expression with resultType object). /// The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). /// Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). - internal AzureMLExecutePipelineActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, DataFactoryElement mlPipelineId, DataFactoryElement mlPipelineEndpointId, DataFactoryElement version, DataFactoryElement experimentName, DataFactoryElement> mlPipelineParameters, DataFactoryElement> dataPathAssignments, DataFactoryElement mlParentRunId, DataFactoryElement continueOnStepFailure) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) + internal AzureMLExecutePipelineActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? 
onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, DataFactoryElement mlPipelineId, DataFactoryElement mlPipelineEndpointId, DataFactoryElement version, DataFactoryElement experimentName, DataFactoryElement> mlPipelineParameters, BinaryData dataPathAssignments, DataFactoryElement mlParentRunId, DataFactoryElement continueOnStepFailure) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) { MLPipelineId = mlPipelineId; MLPipelineEndpointId = mlPipelineEndpointId; @@ -67,8 +67,37 @@ internal AzureMLExecutePipelineActivity(string name, string activityType, string public DataFactoryElement ExperimentName { get; set; } /// Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). public DataFactoryElement> MLPipelineParameters { get; set; } - /// Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). - public DataFactoryElement> DataPathAssignments { get; set; } + /// + /// Dictionary used for changing data path assignments without retraining. Values will be passed in the dataPathAssignments property of the published pipeline execution request. Type: object (or Expression with resultType object). + /// + /// To assign an object to this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + public BinaryData DataPathAssignments { get; set; } /// The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). public DataFactoryElement MLParentRunId { get; set; } /// Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). 
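With DataPathAssignments loosened to raw BinaryData, any JSON shape can be supplied, as the new remarks above illustrate. A short sketch under that assumption (the assignment key and data store values below are placeholders, not part of this PR):

```csharp
using System;
using Azure.ResourceManager.DataFactory.Models;

var activity = new AzureMLExecutePipelineActivity("RunMLPipeline");

// Arbitrary key/value JSON is accepted now that the property is BinaryData.
activity.DataPathAssignments = BinaryData.FromObjectAsJson(new
{
    training_input = new { DataStoreName = "workspaceblobstore", RelativePath = "data/train" }
});
```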
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureSqlSource.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureSqlSource.Serialization.cs index 69373f9be9f7e..10e2178fe42a3 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureSqlSource.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureSqlSource.Serialization.cs @@ -60,14 +60,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(PartitionOption)) { writer.WritePropertyName("partitionOption"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(PartitionOption); -#else - using (JsonDocument document = JsonDocument.Parse(PartitionOption)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, PartitionOption); } if (Optional.IsDefined(PartitionSettings)) { @@ -139,7 +132,7 @@ internal static AzureSqlSource DeserializeAzureSqlSource(JsonElement element) Optional storedProcedureParameters = default; Optional> isolationLevel = default; Optional produceAdditionalTypes = default; - Optional partitionOption = default; + Optional> partitionOption = default; Optional partitionSettings = default; Optional> queryTimeout = default; Optional additionalColumns = default; @@ -203,7 +196,7 @@ internal static AzureSqlSource DeserializeAzureSqlSource(JsonElement element) { continue; } - partitionOption = BinaryData.FromString(property.Value.GetRawText()); + partitionOption = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } if (property.NameEquals("partitionSettings"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureSqlSource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureSqlSource.cs index ebee9b63c789e..c9e85ead29a5a 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureSqlSource.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/AzureSqlSource.cs @@ -34,9 +34,9 @@ public AzureSqlSource() /// Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". /// Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). /// Which additional types to produce. - /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). /// The settings that will be leveraged for Sql source partitioning. 
- internal AzureSqlSource(string copySourceType, DataFactoryElement sourceRetryCount, DataFactoryElement sourceRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement queryTimeout, BinaryData additionalColumns, DataFactoryElement sqlReaderQuery, DataFactoryElement sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement isolationLevel, BinaryData produceAdditionalTypes, BinaryData partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns) + internal AzureSqlSource(string copySourceType, DataFactoryElement sourceRetryCount, DataFactoryElement sourceRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement queryTimeout, BinaryData additionalColumns, DataFactoryElement sqlReaderQuery, DataFactoryElement sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement isolationLevel, BinaryData produceAdditionalTypes, DataFactoryElement partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns) { SqlReaderQuery = sqlReaderQuery; SqlReaderStoredProcedureName = sqlReaderStoredProcedureName; @@ -116,37 +116,8 @@ internal AzureSqlSource(string copySourceType, DataFactoryElement sourceRet /// /// public BinaryData ProduceAdditionalTypes { get; set; } - /// - /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData PartitionOption { get; set; } + /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). + public DataFactoryElement PartitionOption { get; set; } /// The settings that will be leveraged for Sql source partitioning. 
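PartitionOption moves in the opposite direction of the DataPathAssignments change: BinaryData tightens to DataFactoryElement&lt;string&gt;, the same pattern applied to FunctionAppUri and ResourceId on AzureFunctionLinkedService earlier in this diff. A hedged sketch of what callers can now assign (the expression text is illustrative):

```csharp
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var source = new AzureSqlSource();

// A literal partition option...
source.PartitionOption = DataFactoryElement<string>.FromLiteral("DynamicRange");

// ...or an expression resolved at run time.
source.PartitionOption = DataFactoryElement<string>.FromExpression("@pipeline().parameters.partitionOption");
```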
public SqlPartitionSettings PartitionSettings { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CmdkeySetup.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CmdkeySetup.Serialization.cs index 47288becd8664..5251987049e64 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CmdkeySetup.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CmdkeySetup.Serialization.cs @@ -5,7 +5,6 @@ #nullable disable -using System; using System.Text.Json; using Azure.Core; using Azure.Core.Expressions.DataFactory; @@ -22,23 +21,9 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("typeProperties"u8); writer.WriteStartObject(); writer.WritePropertyName("targetName"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(TargetName); -#else - using (JsonDocument document = JsonDocument.Parse(TargetName)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, TargetName); writer.WritePropertyName("userName"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(UserName); -#else - using (JsonDocument document = JsonDocument.Parse(UserName)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, UserName); writer.WritePropertyName("password"u8); JsonSerializer.Serialize(writer, Password); writer.WriteEndObject(); @@ -52,8 +37,8 @@ internal static CmdkeySetup DeserializeCmdkeySetup(JsonElement element) return null; } string type = default; - BinaryData targetName = default; - BinaryData userName = default; + DataFactoryElement targetName = default; + DataFactoryElement userName = default; DataFactorySecretBaseDefinition password = default; foreach (var property in element.EnumerateObject()) { @@ -73,12 +58,12 @@ internal static CmdkeySetup DeserializeCmdkeySetup(JsonElement element) { if (property0.NameEquals("targetName"u8)) { - targetName = BinaryData.FromString(property0.Value.GetRawText()); + targetName = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("userName"u8)) { - userName = BinaryData.FromString(property0.Value.GetRawText()); + userName = JsonSerializer.Deserialize>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("password"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CmdkeySetup.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CmdkeySetup.cs index 2e5b6a82d3973..d20a05f505803 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CmdkeySetup.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CmdkeySetup.cs @@ -15,11 +15,11 @@ namespace Azure.ResourceManager.DataFactory.Models public partial class CmdkeySetup : CustomSetupBase { /// Initializes a new instance of . - /// The server name of data source access. - /// The user name of data source access. + /// The server name of data source access. Type: string. + /// The user name of data source access. Type: string. /// The password of data source access. /// , or is null. 
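The constructor swap in the next hunk replaces both BinaryData parameters with DataFactoryElement&lt;string&gt;. A usage sketch of the new shape, assuming DataFactorySecretString as the secret type (server, user, and password values are placeholders):

```csharp
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var setup = new CmdkeySetup(
    DataFactoryElement<string>.FromLiteral(@"\\myserver"),
    DataFactoryElement<string>.FromLiteral(@"domain\user"),
    new DataFactorySecretString("placeholder-password"));
```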
- public CmdkeySetup(BinaryData targetName, BinaryData userName, DataFactorySecretBaseDefinition password) + public CmdkeySetup(DataFactoryElement targetName, DataFactoryElement userName, DataFactorySecretBaseDefinition password) { Argument.AssertNotNull(targetName, nameof(targetName)); Argument.AssertNotNull(userName, nameof(userName)); @@ -33,10 +33,10 @@ public CmdkeySetup(BinaryData targetName, BinaryData userName, DataFactorySecret /// Initializes a new instance of . /// The type of custom setup. - /// The server name of data source access. - /// The user name of data source access. + /// The server name of data source access. Type: string. + /// The user name of data source access. Type: string. /// The password of data source access. - internal CmdkeySetup(string customSetupBaseType, BinaryData targetName, BinaryData userName, DataFactorySecretBaseDefinition password) : base(customSetupBaseType) + internal CmdkeySetup(string customSetupBaseType, DataFactoryElement targetName, DataFactoryElement userName, DataFactorySecretBaseDefinition password) : base(customSetupBaseType) { TargetName = targetName; UserName = userName; @@ -44,68 +44,10 @@ internal CmdkeySetup(string customSetupBaseType, BinaryData targetName, BinaryDa CustomSetupBaseType = customSetupBaseType ?? "CmdkeySetup"; } - /// - /// The server name of data source access. - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData TargetName { get; set; } - /// - /// The user name of data source access. - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData UserName { get; set; } + /// The server name of data source access. Type: string. + public DataFactoryElement TargetName { get; set; } + /// The user name of data source access. Type: string. + public DataFactoryElement UserName { get; set; } /// The password of data source access. public DataFactorySecretBaseDefinition Password { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivity.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivity.cs index e110455c369a4..cd9805204afa8 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivity.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivity.cs @@ -20,12 +20,12 @@ public partial class CopyActivity : ExecutionActivity /// /// Copy activity source. 
/// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// /// Copy activity sink. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// , or is null. public CopyActivity(string name, CopyActivitySource source, CopySink sink) : base(name) @@ -59,12 +59,12 @@ public CopyActivity(string name, CopyActivitySource source, CopySink sink) : bas /// /// Copy activity source. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// /// Copy activity sink. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// Copy activity translator. If not specified, tabular translator is used. /// Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). @@ -108,13 +108,13 @@ internal CopyActivity(string name, string activityType, string description, Pipe /// /// Copy activity source. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . 
+ /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// public CopyActivitySource Source { get; set; } /// /// Copy activity sink. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// public CopySink Sink { get; set; } /// diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivitySource.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivitySource.Serialization.cs index f70df19f82d4c..842d8647989c7 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivitySource.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivitySource.Serialization.cs @@ -133,7 +133,9 @@ internal static CopyActivitySource DeserializeCopyActivitySource(JsonElement ele case "RestSource": return RestSource.DeserializeRestSource(element); case "SalesforceMarketingCloudSource": return SalesforceMarketingCloudSource.DeserializeSalesforceMarketingCloudSource(element); case "SalesforceServiceCloudSource": return SalesforceServiceCloudSource.DeserializeSalesforceServiceCloudSource(element); + case "SalesforceServiceCloudV2Source": return SalesforceServiceCloudV2Source.DeserializeSalesforceServiceCloudV2Source(element); case "SalesforceSource": return SalesforceSource.DeserializeSalesforceSource(element); + case "SalesforceV2Source": return SalesforceV2Source.DeserializeSalesforceV2Source(element); case "SapBwSource": return SapBWSource.DeserializeSapBWSource(element); case "SapCloudForCustomerSource": return SapCloudForCustomerSource.DeserializeSapCloudForCustomerSource(element); case "SapEccSource": return SapEccSource.DeserializeSapEccSource(element); diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivitySource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivitySource.cs index ce5cfbd5a834f..181ba13e8d129 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivitySource.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopyActivitySource.cs @@ -15,7 +15,7 @@ namespace Azure.ResourceManager.DataFactory.Models /// /// A copy activity source. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . 
/// public partial class CopyActivitySource { diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.Serialization.cs index 36d6ef7d0c2bb..8acf02ed8bf91 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.Serialization.cs @@ -105,7 +105,9 @@ internal static CopySink DeserializeCopySink(JsonElement element) case "ParquetSink": return ParquetSink.DeserializeParquetSink(element); case "RestSink": return RestSink.DeserializeRestSink(element); case "SalesforceServiceCloudSink": return SalesforceServiceCloudSink.DeserializeSalesforceServiceCloudSink(element); + case "SalesforceServiceCloudV2Sink": return SalesforceServiceCloudV2Sink.DeserializeSalesforceServiceCloudV2Sink(element); case "SalesforceSink": return SalesforceSink.DeserializeSalesforceSink(element); + case "SalesforceV2Sink": return SalesforceV2Sink.DeserializeSalesforceV2Sink(element); case "SapCloudForCustomerSink": return SapCloudForCustomerSink.DeserializeSapCloudForCustomerSink(element); case "SnowflakeSink": return SnowflakeSink.DeserializeSnowflakeSink(element); case "SqlDWSink": return SqlDWSink.DeserializeSqlDWSink(element); diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.cs index d06c59a53e360..6cde894e3725d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/CopySink.cs @@ -15,7 +15,7 @@ namespace Azure.ResourceManager.DataFactory.Models /// /// A copy activity sink. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// public partial class CopySink { diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetDebugInfo.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetDebugInfo.cs index 1f7dd7f99fab1..da01c3aac2662 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetDebugInfo.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetDebugInfo.cs @@ -17,7 +17,7 @@ public partial class DataFactoryDatasetDebugInfo : DataFactoryDebugInfo /// /// Dataset properties. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . 
+ /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// is null. public DataFactoryDatasetDebugInfo(DataFactoryDatasetProperties properties) @@ -32,7 +32,7 @@ public DataFactoryDatasetDebugInfo(DataFactoryDatasetProperties properties) /// /// Dataset properties. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// internal DataFactoryDatasetDebugInfo(string name, DataFactoryDatasetProperties properties) : base(name) { @@ -42,7 +42,7 @@ internal DataFactoryDatasetDebugInfo(string name, DataFactoryDatasetProperties p /// /// Dataset properties. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . 
/// public DataFactoryDatasetProperties Properties { get; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.Serialization.cs index ced4b08088ad4..cfb0fbae9a170 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.Serialization.cs @@ -173,6 +173,8 @@ internal static DataFactoryDatasetProperties DeserializeDataFactoryDatasetProper case "SalesforceMarketingCloudObject": return SalesforceMarketingCloudObjectDataset.DeserializeSalesforceMarketingCloudObjectDataset(element); case "SalesforceObject": return SalesforceObjectDataset.DeserializeSalesforceObjectDataset(element); case "SalesforceServiceCloudObject": return SalesforceServiceCloudObjectDataset.DeserializeSalesforceServiceCloudObjectDataset(element); + case "SalesforceServiceCloudV2Object": return SalesforceServiceCloudV2ObjectDataset.DeserializeSalesforceServiceCloudV2ObjectDataset(element); + case "SalesforceV2Object": return SalesforceV2ObjectDataset.DeserializeSalesforceV2ObjectDataset(element); case "SapBwCube": return SapBWCubeDataset.DeserializeSapBWCubeDataset(element); case "SapCloudForCustomerResource": return SapCloudForCustomerResourceDataset.DeserializeSapCloudForCustomerResourceDataset(element); case "SapEccResource": return SapEccResourceDataset.DeserializeSapEccResourceDataset(element); diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.cs index c931c027d33b8..3f42c823207e9 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryDatasetProperties.cs @@ -15,7 +15,7 @@ namespace Azure.ResourceManager.DataFactory.Models /// /// The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . 
/// public partial class DataFactoryDatasetProperties { diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceDebugInfo.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceDebugInfo.cs index ad1b858796526..0e9e6a621daf0 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceDebugInfo.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceDebugInfo.cs @@ -17,7 +17,7 @@ public partial class DataFactoryLinkedServiceDebugInfo : DataFactoryDebugInfo /// /// Properties of linked service. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// is null. public DataFactoryLinkedServiceDebugInfo(DataFactoryLinkedServiceProperties properties) @@ -32,7 +32,7 @@ public DataFactoryLinkedServiceDebugInfo(DataFactoryLinkedServiceProperties prop /// /// Properties of linked service. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// internal DataFactoryLinkedServiceDebugInfo(string name, DataFactoryLinkedServiceProperties properties) : base(name) { @@ -42,7 +42,7 @@ internal DataFactoryLinkedServiceDebugInfo(string name, DataFactoryLinkedService /// /// Properties of linked service. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . 
/// public DataFactoryLinkedServiceProperties Properties { get; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.Serialization.cs index 78d4dd4580d08..8ceecbadb3a25 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.Serialization.cs @@ -174,6 +174,8 @@ internal static DataFactoryLinkedServiceProperties DeserializeDataFactoryLinkedS case "Salesforce": return SalesforceLinkedService.DeserializeSalesforceLinkedService(element); case "SalesforceMarketingCloud": return SalesforceMarketingCloudLinkedService.DeserializeSalesforceMarketingCloudLinkedService(element); case "SalesforceServiceCloud": return SalesforceServiceCloudLinkedService.DeserializeSalesforceServiceCloudLinkedService(element); + case "SalesforceServiceCloudV2": return SalesforceServiceCloudV2LinkedService.DeserializeSalesforceServiceCloudV2LinkedService(element); + case "SalesforceV2": return SalesforceV2LinkedService.DeserializeSalesforceV2LinkedService(element); case "SapBW": return SapBWLinkedService.DeserializeSapBWLinkedService(element); case "SapCloudForCustomer": return SapCloudForCustomerLinkedService.DeserializeSapCloudForCustomerLinkedService(element); case "SapEcc": return SapEccLinkedService.DeserializeSapEccLinkedService(element); diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.cs index bcdd8837d612b..c94983574ba48 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/DataFactoryLinkedServiceProperties.cs @@ -14,7 +14,7 @@ namespace Azure.ResourceManager.DataFactory.Models /// /// The nested object which contains the information and credential which can be used to connect with related store or compute resource. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . 
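The new SalesforceV2/SalesforceServiceCloudV2 cases in the source, sink, dataset, and linked-service deserializers all extend the same discriminator dispatch. A hedged sketch of that pattern as it applies to linked services (the `DispatchSketch` method is illustrative and would need to live inside the generated assembly, since the per-type Deserialize methods are internal; the real generated code also falls back to a catch-all model rather than throwing):

```csharp
using System.Text.Json;
using Azure.ResourceManager.DataFactory.Models;

internal static class LinkedServiceDispatchSketch
{
    internal static DataFactoryLinkedServiceProperties DispatchSketch(JsonElement element)
    {
        // Switch on the "type" discriminator, as the generated method does.
        string type = element.TryGetProperty("type", out JsonElement discriminator)
            ? discriminator.GetString()
            : null;
        switch (type)
        {
            // Cases added by this change:
            case "SalesforceV2":
                return SalesforceV2LinkedService.DeserializeSalesforceV2LinkedService(element);
            case "SalesforceServiceCloudV2":
                return SalesforceServiceCloudV2LinkedService.DeserializeSalesforceServiceCloudV2LinkedService(element);
            // ...the many existing cases are elided here...
            default:
                throw new JsonException($"Unhandled linked service type '{type}' in this sketch.");
        }
    }
}
```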
/// public partial class DataFactoryLinkedServiceProperties { diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/FileServerWriteSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/FileServerWriteSettings.Serialization.cs index 8758f0c5f1b7d..d9a803ee8497d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/FileServerWriteSettings.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/FileServerWriteSettings.Serialization.cs @@ -35,6 +35,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("copyBehavior"u8); JsonSerializer.Serialize(writer, CopyBehavior); } + if (Optional.IsCollectionDefined(Metadata)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartArray(); + foreach (var item in Metadata) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -60,6 +70,7 @@ internal static FileServerWriteSettings DeserializeFileServerWriteSettings(JsonE Optional> maxConcurrentConnections = default; Optional> disableMetricsCollection = default; Optional> copyBehavior = default; + Optional> metadata = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -96,10 +107,24 @@ internal static FileServerWriteSettings DeserializeFileServerWriteSettings(JsonE copyBehavior = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("metadata"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(DataFactoryMetadataItemInfo.DeserializeDataFactoryMetadataItemInfo(item)); + } + metadata = array; + continue; + } additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new FileServerWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, additionalProperties); + return new FileServerWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, Optional.ToList(metadata), additionalProperties); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/FileServerWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/FileServerWriteSettings.cs index 6f44b0aedc3d6..c000479d9075f 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/FileServerWriteSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/FileServerWriteSettings.cs @@ -25,8 +25,9 @@ public FileServerWriteSettings() /// The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// The type of copy behavior for copy sink. + /// Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). /// Additional Properties. 
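These hunks add the optional metadata array to FileServerWriteSettings serialization, matching the StoreWriteSettings base-class change elsewhere in this PR. A hedged usage sketch, assuming DataFactoryMetadataItemInfo exposes settable Name/Value elements (the names and values below are placeholders):

```csharp
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var settings = new FileServerWriteSettings();

// Metadata lives on the StoreWriteSettings base and is pre-initialized,
// so entries are added in place.
settings.Metadata.Add(new DataFactoryMetadataItemInfo
{
    Name = DataFactoryElement<string>.FromLiteral("writtenBy"),
    Value = DataFactoryElement<string>.FromLiteral("adf-copy-activity"),
});
```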
- internal FileServerWriteSettings(string storeWriteSettingsType, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, DataFactoryElement<string> copyBehavior, IDictionary<string, BinaryData> additionalProperties) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, additionalProperties) + internal FileServerWriteSettings(string storeWriteSettingsType, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, DataFactoryElement<string> copyBehavior, IList<DataFactoryMetadataItemInfo> metadata, IDictionary<string, BinaryData> additionalProperties) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, metadata, additionalProperties) { StoreWriteSettingsType = storeWriteSettingsType ?? "FileServerWriteSettings"; } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightOnDemandLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightOnDemandLinkedService.Serialization.cs index c518e5b7d22ff..2b6664c38a665 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightOnDemandLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightOnDemandLinkedService.Serialization.cs @@ -328,7 +328,7 @@ internal static HDInsightOnDemandLinkedService DeserializeHDInsightOnDemandLinke Optional<string> description = default; Optional<IDictionary<string, EntityParameterSpecification>> parameters = default; Optional<IList<BinaryData>> annotations = default; - DataFactoryElement<string> clusterSize = default; + DataFactoryElement<int> clusterSize = default; DataFactoryElement<string> timeToLive = default; DataFactoryElement<string> version = default; DataFactoryLinkedServiceReference linkedServiceName = default; @@ -431,7 +431,7 @@ internal static HDInsightOnDemandLinkedService DeserializeHDInsightOnDemandLinke { if (property0.NameEquals("clusterSize"u8)) { - clusterSize = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + clusterSize = JsonSerializer.Deserialize<DataFactoryElement<int>>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("timeToLive"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightOnDemandLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightOnDemandLinkedService.cs index 6dede96f84023..a5bb14e2b3168 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightOnDemandLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightOnDemandLinkedService.cs @@ -16,7 +16,7 @@ namespace Azure.ResourceManager.DataFactory.Models public partial class HDInsightOnDemandLinkedService : DataFactoryLinkedServiceProperties { /// Initializes a new instance of . - /// Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). + /// Number of worker/data nodes in the cluster. Suggestion value: 4. Type: int (or Expression with resultType int). /// The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). /// Version of the HDInsight cluster.  Type: string (or Expression with resultType string). /// Azure Storage linked service to be used by the on-demand cluster for storing and processing data. @@ -24,7 +24,7 @@ public partial class HDInsightOnDemandLinkedServicePr /// The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). /// The resource group where the cluster belongs. Type: string (or Expression with resultType string). /// , , , , , or is null. - public HDInsightOnDemandLinkedService(DataFactoryElement<string> clusterSize, DataFactoryElement<string> timeToLiveExpression, DataFactoryElement<string> version, DataFactoryLinkedServiceReference linkedServiceName, DataFactoryElement<string> hostSubscriptionId, DataFactoryElement<string> tenant, DataFactoryElement<string> clusterResourceGroup) + public HDInsightOnDemandLinkedService(DataFactoryElement<int> clusterSize, DataFactoryElement<string> timeToLiveExpression, DataFactoryElement<string> version, DataFactoryLinkedServiceReference linkedServiceName, DataFactoryElement<string> hostSubscriptionId, DataFactoryElement<string> tenant, DataFactoryElement<string> clusterResourceGroup) { Argument.AssertNotNull(clusterSize, nameof(clusterSize)); Argument.AssertNotNull(timeToLiveExpression, nameof(timeToLiveExpression)); @@ -53,7 +53,7 @@ public HDInsightOnDemandLinkedService(DataFactoryElement<string> clusterSize, Da /// Parameters for linked service. /// List of tags that can be used for describing the linked service. /// Additional Properties. - /// Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). + /// Number of worker/data nodes in the cluster. Suggestion value: 4. Type: int (or Expression with resultType int). /// The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). /// Version of the HDInsight cluster.  Type: string (or Expression with resultType string). /// Azure Storage linked service to be used by the on-demand cluster for storing and processing data. @@ -87,7 +87,7 @@ public HDInsightOnDemandLinkedService(DataFactoryElement<string> clusterSize, Da /// The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). /// The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). /// The credential reference containing authentication information.
- internal HDInsightOnDemandLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement clusterSize, DataFactoryElement timeToLiveExpression, DataFactoryElement version, DataFactoryLinkedServiceReference linkedServiceName, DataFactoryElement hostSubscriptionId, DataFactoryElement servicePrincipalId, DataFactorySecretBaseDefinition servicePrincipalKey, DataFactoryElement tenant, DataFactoryElement clusterResourceGroup, DataFactoryElement clusterNamePrefix, DataFactoryElement clusterUserName, DataFactorySecretBaseDefinition clusterPassword, DataFactoryElement clusterSshUserName, DataFactorySecretBaseDefinition clusterSshPassword, IList additionalLinkedServiceNames, DataFactoryLinkedServiceReference hcatalogLinkedServiceName, DataFactoryElement clusterType, DataFactoryElement sparkVersion, BinaryData coreConfiguration, BinaryData hBaseConfiguration, BinaryData hdfsConfiguration, BinaryData hiveConfiguration, BinaryData mapReduceConfiguration, BinaryData oozieConfiguration, BinaryData stormConfiguration, BinaryData yarnConfiguration, string encryptedCredential, BinaryData headNodeSize, BinaryData dataNodeSize, BinaryData zookeeperNodeSize, IList scriptActions, DataFactoryElement virtualNetworkId, DataFactoryElement subnetName, DataFactoryCredentialReference credential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) + internal HDInsightOnDemandLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary parameters, IList annotations, IDictionary additionalProperties, DataFactoryElement clusterSize, DataFactoryElement timeToLiveExpression, DataFactoryElement version, DataFactoryLinkedServiceReference linkedServiceName, DataFactoryElement hostSubscriptionId, DataFactoryElement servicePrincipalId, DataFactorySecretBaseDefinition servicePrincipalKey, DataFactoryElement tenant, DataFactoryElement clusterResourceGroup, DataFactoryElement clusterNamePrefix, DataFactoryElement clusterUserName, DataFactorySecretBaseDefinition clusterPassword, DataFactoryElement clusterSshUserName, DataFactorySecretBaseDefinition clusterSshPassword, IList additionalLinkedServiceNames, DataFactoryLinkedServiceReference hcatalogLinkedServiceName, DataFactoryElement clusterType, DataFactoryElement sparkVersion, BinaryData coreConfiguration, BinaryData hBaseConfiguration, BinaryData hdfsConfiguration, BinaryData hiveConfiguration, BinaryData mapReduceConfiguration, BinaryData oozieConfiguration, BinaryData stormConfiguration, BinaryData yarnConfiguration, string encryptedCredential, BinaryData headNodeSize, BinaryData dataNodeSize, BinaryData zookeeperNodeSize, IList scriptActions, DataFactoryElement virtualNetworkId, DataFactoryElement subnetName, DataFactoryCredentialReference credential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) { ClusterSize = clusterSize; TimeToLiveExpression = timeToLiveExpression; @@ -126,8 +126,8 @@ internal HDInsightOnDemandLinkedService(string linkedServiceType, IntegrationRun LinkedServiceType = linkedServiceType ?? "HDInsightOnDemand"; } - /// Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). - public DataFactoryElement ClusterSize { get; set; } + /// Number of worker/data nodes in the cluster. Suggestion value: 4. 
Type: int (or Expression with resultType int). + public DataFactoryElement<int> ClusterSize { get; set; } /// The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). public DataFactoryElement<string> TimeToLiveExpression { get; set; } /// Version of the HDInsight cluster.  Type: string (or Expression with resultType string). diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightPigActivity.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightPigActivity.Serialization.cs index ef889a7cdd2c2..70d076e053551 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightPigActivity.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightPigActivity.Serialization.cs @@ -82,14 +82,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(Arguments)) { writer.WritePropertyName("arguments"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(Arguments); -#else - using (JsonDocument document = JsonDocument.Parse(Arguments)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, Arguments); } if (Optional.IsDefined(GetDebugInfo)) { @@ -161,7 +154,7 @@ internal static HDInsightPigActivity DeserializeHDInsightPigActivity(JsonElement Optional<IList<PipelineActivityDependency>> dependsOn = default; Optional<IList<PipelineActivityUserProperty>> userProperties = default; Optional<IList<DataFactoryLinkedServiceReference>> storageLinkedServices = default; - Optional<BinaryData> arguments = default; + Optional<DataFactoryElement<IList<string>>> arguments = default; Optional<HDInsightActivityDebugInfoOptionSetting> getDebugInfo = default; Optional<DataFactoryElement<string>> scriptPath = default; Optional<DataFactoryLinkedServiceReference> scriptLinkedService = default; @@ -278,7 +271,7 @@ internal static HDInsightPigActivity DeserializeHDInsightPigActivity(JsonElement { continue; } - arguments = BinaryData.FromString(property0.Value.GetRawText()); + arguments = JsonSerializer.Deserialize<DataFactoryElement<IList<string>>>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("getDebugInfo"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightPigActivity.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightPigActivity.cs index dc33f6f0cd0cd..943812a5d9938 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightPigActivity.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HDInsightPigActivity.cs @@ -44,7 +44,7 @@ public HDInsightPigActivity(string name) : base(name) /// Script path. Type: string (or Expression with resultType string). /// Script linked service reference. /// Allows user to specify defines for Pig job request. - internal HDInsightPigActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList<PipelineActivityDependency> dependsOn, IList<PipelineActivityUserProperty> userProperties, IDictionary<string, BinaryData> additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, IList<DataFactoryLinkedServiceReference> storageLinkedServices, BinaryData arguments, HDInsightActivityDebugInfoOptionSetting? getDebugInfo, DataFactoryElement<string> scriptPath, DataFactoryLinkedServiceReference scriptLinkedService, IDictionary<string, BinaryData> defines) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) + internal HDInsightPigActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList<PipelineActivityDependency> dependsOn, IList<PipelineActivityUserProperty> userProperties, IDictionary<string, BinaryData> additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, IList<DataFactoryLinkedServiceReference> storageLinkedServices, DataFactoryElement<IList<string>> arguments, HDInsightActivityDebugInfoOptionSetting? getDebugInfo, DataFactoryElement<string> scriptPath, DataFactoryLinkedServiceReference scriptLinkedService, IDictionary<string, BinaryData> defines) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) { StorageLinkedServices = storageLinkedServices; Arguments = arguments; @@ -57,37 +57,8 @@ internal HDInsightPigActivity(string name, string activityType, string descripti /// Storage linked service references. public IList<DataFactoryLinkedServiceReference> StorageLinkedServices { get; } - /// - /// User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData Arguments { get; set; } + /// User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). + public DataFactoryElement<IList<string>> Arguments { get; set; } /// Debug info option. public HDInsightActivityDebugInfoOptionSetting? GetDebugInfo { get; set; } /// Script path. Type: string (or Expression with resultType string).
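Note: Arguments moves from an opaque BinaryData payload to a strongly typed DataFactoryElement<IList<string>>. A minimal usage sketch, not part of this diff; the activity name, script path, and pipeline parameter are hypothetical, and the FromLiteral/FromExpression factories are assumed to come from Azure.Core.Expressions.DataFactory:

using System.Collections.Generic;
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var pig = new HDInsightPigActivity("RunPigScript")
{
    ScriptPath = DataFactoryElement<string>.FromLiteral("adl://scripts/transform.pig"),
    // A literal argument list now serializes as a JSON array without manual BinaryData plumbing...
    Arguments = DataFactoryElement<IList<string>>.FromLiteral(new List<string> { "-param", "year=2023" })
};
// ...or the value can be an ADF expression that resolves to an array at run time.
pig.Arguments = DataFactoryElement<IList<string>>.FromExpression("@pipeline().parameters.pigArgs");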
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HttpLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HttpLinkedService.Serialization.cs index 3a34c16497e00..328cc02bc1fc6 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HttpLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HttpLinkedService.Serialization.cs @@ -138,7 +138,7 @@ internal static HttpLinkedService DeserializeHttpLinkedService(JsonElement eleme Optional<HttpAuthenticationType> authenticationType = default; Optional<DataFactoryElement<string>> userName = default; Optional<DataFactorySecretBaseDefinition> password = default; - Optional<DataFactoryElement<BinaryData>> authHeaders = default; + Optional<DataFactoryElement<IDictionary<string, string>>> authHeaders = default; Optional<DataFactoryElement<string>> embeddedCertData = default; Optional<DataFactoryElement<string>> certThumbprint = default; Optional<string> encryptedCredential = default; @@ -248,7 +248,7 @@ internal static HttpLinkedService DeserializeHttpLinkedService(JsonElement eleme { continue; } - authHeaders = JsonSerializer.Deserialize<DataFactoryElement<BinaryData>>(property0.Value.GetRawText()); + authHeaders = JsonSerializer.Deserialize<DataFactoryElement<IDictionary<string, string>>>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("embeddedCertData"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HttpLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HttpLinkedService.cs index b561f7fdc90f9..3a1fdcd726272 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HttpLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/HttpLinkedService.cs @@ -37,12 +37,12 @@ public HttpLinkedService(DataFactoryElement<string> uri) /// The authentication type to be used to connect to the HTTP server. /// User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). /// Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. - /// The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). + /// The additional HTTP headers in the request to RESTful API used for authorization. Type: key value pairs (value should be string type). /// Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). /// Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. /// If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). - internal HttpLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> uri, HttpAuthenticationType? authenticationType, DataFactoryElement<string> userName, DataFactorySecretBaseDefinition password, DataFactoryElement<BinaryData> authHeaders, DataFactoryElement<string> embeddedCertData, DataFactoryElement<string> certThumbprint, string encryptedCredential, DataFactoryElement<bool> enableServerCertificateValidation) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) + internal HttpLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> uri, HttpAuthenticationType? authenticationType, DataFactoryElement<string> userName, DataFactorySecretBaseDefinition password, DataFactoryElement<IDictionary<string, string>> authHeaders, DataFactoryElement<string> embeddedCertData, DataFactoryElement<string> certThumbprint, string encryptedCredential, DataFactoryElement<bool> enableServerCertificateValidation) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) { Uri = uri; AuthenticationType = authenticationType; @@ -64,8 +64,8 @@ internal HttpLinkedService(string linkedServiceType, IntegrationRuntimeReference public DataFactoryElement<string> UserName { get; set; } /// Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. public DataFactorySecretBaseDefinition Password { get; set; } - /// The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - public DataFactoryElement<BinaryData> AuthHeaders { get; set; } + /// The additional HTTP headers in the request to RESTful API used for authorization. Type: key value pairs (value should be string type). + public DataFactoryElement<IDictionary<string, string>> AuthHeaders { get; set; } /// Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). public DataFactoryElement<string> EmbeddedCertData { get; set; } /// Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string).
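The same object-to-dictionary retyping recurs below for ODataLinkedService and RestSink. A minimal sketch of the new AuthHeaders shape, assuming the same usings as the previous sketch; the endpoint and header values are hypothetical:

var http = new HttpLinkedService(DataFactoryElement<string>.FromLiteral("https://contoso.example.com/api"))
{
    AuthenticationType = HttpAuthenticationType.Anonymous,
    // Headers are now a typed string-to-string map instead of an opaque object.
    AuthHeaders = DataFactoryElement<IDictionary<string, string>>.FromLiteral(
        new Dictionary<string, string> { ["x-api-key"] = "placeholder-key" })
};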
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.Serialization.cs index 76eaab6103f73..3452dc07c07f1 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.Serialization.cs @@ -35,6 +35,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("copyBehavior"u8); JsonSerializer.Serialize(writer, CopyBehavior); } + if (Optional.IsCollectionDefined(Metadata)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartArray(); + foreach (var item in Metadata) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -60,6 +70,7 @@ internal static LakeHouseWriteSettings DeserializeLakeHouseWriteSettings(JsonEle Optional<DataFactoryElement<int>> maxConcurrentConnections = default; Optional<DataFactoryElement<bool>> disableMetricsCollection = default; Optional<DataFactoryElement<string>> copyBehavior = default; + Optional<IList<DataFactoryMetadataItemInfo>> metadata = default; IDictionary<string, BinaryData> additionalProperties = default; Dictionary<string, BinaryData> additionalPropertiesDictionary = new Dictionary<string, BinaryData>(); foreach (var property in element.EnumerateObject()) @@ -96,10 +107,24 @@ internal static LakeHouseWriteSettings DeserializeLakeHouseWriteSettings(JsonEle copyBehavior = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText()); continue; } + if (property.NameEquals("metadata"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List<DataFactoryMetadataItemInfo> array = new List<DataFactoryMetadataItemInfo>(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(DataFactoryMetadataItemInfo.DeserializeDataFactoryMetadataItemInfo(item)); + } + metadata = array; + continue; + } additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new LakeHouseWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, additionalProperties); + return new LakeHouseWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, Optional.ToList(metadata), additionalProperties); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.cs index ebeb07e44babc..636d6c758f4d3 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LakeHouseWriteSettings.cs @@ -25,8 +25,9 @@ public LakeHouseWriteSettings() /// The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// The type of copy behavior for copy sink. + /// Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). /// Additional Properties. - internal LakeHouseWriteSettings(string storeWriteSettingsType, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, DataFactoryElement<string> copyBehavior, IDictionary<string, BinaryData> additionalProperties) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, additionalProperties) + internal LakeHouseWriteSettings(string storeWriteSettingsType, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, DataFactoryElement<string> copyBehavior, IList<DataFactoryMetadataItemInfo> metadata, IDictionary<string, BinaryData> additionalProperties) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, metadata, additionalProperties) { StoreWriteSettingsType = storeWriteSettingsType ?? "LakeHouseWriteSettings"; } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LookupActivity.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LookupActivity.cs index cbdb466cb4408..86a9cb4c3646e 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LookupActivity.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/LookupActivity.cs @@ -20,7 +20,7 @@ public partial class LookupActivity : ExecutionActivity /// /// Dataset-specific source properties, same as copy activity source. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// Lookup activity dataset reference. /// , or is null. @@ -49,7 +49,7 @@ public LookupActivity(string name, CopyActivitySource source, DatasetReference d /// /// Dataset-specific source properties, same as copy activity source. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// /// Lookup activity dataset reference. /// Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). @@ -64,7 +64,7 @@ internal LookupActivity(string name, string activityType, string description, Pi /// /// Dataset-specific source properties, same as copy activity source. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes.
- /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . /// public CopyActivitySource Source { get; set; } /// Lookup activity dataset reference. diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MariaDBLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MariaDBLinkedService.Serialization.cs index ee2c293faa714..19d8b4231cdf3 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MariaDBLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MariaDBLinkedService.Serialization.cs @@ -65,14 +65,39 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) } writer.WritePropertyName("typeProperties"u8); writer.WriteStartObject(); + if (Optional.IsDefined(DriverVersion)) + { + writer.WritePropertyName("driverVersion"u8); + JsonSerializer.Serialize(writer, DriverVersion); + } if (Optional.IsDefined(ConnectionString)) { writer.WritePropertyName("connectionString"u8); JsonSerializer.Serialize(writer, ConnectionString); } + if (Optional.IsDefined(Server)) + { + writer.WritePropertyName("server"u8); + JsonSerializer.Serialize(writer, Server); + } + if (Optional.IsDefined(Port)) + { + writer.WritePropertyName("port"u8); + JsonSerializer.Serialize(writer, Port); + } + if (Optional.IsDefined(Username)) + { + writer.WritePropertyName("username"u8); + JsonSerializer.Serialize(writer, Username); + } + if (Optional.IsDefined(Database)) + { + writer.WritePropertyName("database"u8); + JsonSerializer.Serialize(writer, Database); + } if (Optional.IsDefined(Password)) { - writer.WritePropertyName("pwd"u8); + writer.WritePropertyName("password"u8); JsonSerializer.Serialize(writer, Password); } if (Optional.IsDefined(EncryptedCredential)) @@ -107,7 +132,12 @@ internal static MariaDBLinkedService DeserializeMariaDBLinkedService(JsonElement Optional<string> description = default; Optional<IDictionary<string, EntityParameterSpecification>> parameters = default; Optional<IList<BinaryData>> annotations = default; + Optional<DataFactoryElement<string>> driverVersion = default; Optional<DataFactoryElement<string>> connectionString = default; + Optional<DataFactoryElement<string>> server = default; + Optional<DataFactoryElement<int>> port = default; + Optional<DataFactoryElement<string>> username = default; + Optional<DataFactoryElement<string>> database = default; Optional<DataFactoryKeyVaultSecretReference> password = default; Optional<string> encryptedCredential = default; IDictionary<string, BinaryData> additionalProperties = default; @@ -177,6 +207,15 @@ internal static MariaDBLinkedService DeserializeMariaDBLinkedService(JsonElement } foreach (var property0 in property.Value.EnumerateObject()) { + if (property0.NameEquals("driverVersion"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + driverVersion = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } if (property0.NameEquals("connectionString"u8)) { if (property0.Value.ValueKind == JsonValueKind.Null) @@ -186,7 +225,43 @@ internal static MariaDBLinkedService DeserializeMariaDBLinkedService(JsonElement connectionString = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); continue; } - if (property0.NameEquals("pwd"u8)) + if (property0.NameEquals("server"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + server = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("port"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + port = JsonSerializer.Deserialize<DataFactoryElement<int>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("username"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + username = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("database"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + database = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("password"u8)) { if (property0.Value.ValueKind == JsonValueKind.Null) { @@ -206,7 +281,7 @@ internal static MariaDBLinkedService DeserializeMariaDBLinkedService(JsonElement additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new MariaDBLinkedService(type, connectVia.Value, description.Value, Optional.ToDictionary(parameters), Optional.ToList(annotations), additionalProperties, connectionString.Value, password, encryptedCredential.Value); + return new MariaDBLinkedService(type, connectVia.Value, description.Value, Optional.ToDictionary(parameters), Optional.ToList(annotations), additionalProperties, driverVersion.Value, connectionString.Value, server.Value, port.Value, username.Value, database.Value, password, encryptedCredential.Value); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MariaDBLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MariaDBLinkedService.cs index c07af3c14e3d9..4cddd8252f68c 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MariaDBLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MariaDBLinkedService.cs @@ -27,19 +27,39 @@ public MariaDBLinkedService() /// Parameters for linked service. /// List of tags that can be used for describing the linked service. /// Additional Properties. + /// The version of the MariaDB driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can only support connection string. /// An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + /// Server name for connection. Type: string. + /// The port for the connection. Type: integer. + /// Username for authentication. Type: string. + /// Database name for connection. Type: string. /// The Azure key vault secret reference of password in connection string. /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. - internal MariaDBLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> connectionString, DataFactoryKeyVaultSecretReference password, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) + internal MariaDBLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> driverVersion, DataFactoryElement<string> connectionString, DataFactoryElement<string> server, DataFactoryElement<int> port, DataFactoryElement<string> username, DataFactoryElement<string> database, DataFactoryKeyVaultSecretReference password, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) { + DriverVersion = driverVersion; ConnectionString = connectionString; + Server = server; + Port = port; + Username = username; + Database = database; Password = password; EncryptedCredential = encryptedCredential; LinkedServiceType = linkedServiceType ?? "MariaDB"; } + /// The version of the MariaDB driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can only support connection string. + public DataFactoryElement<string> DriverVersion { get; set; } /// An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. public DataFactoryElement<string> ConnectionString { get; set; } + /// Server name for connection. Type: string. + public DataFactoryElement<string> Server { get; set; } + /// The port for the connection. Type: integer. + public DataFactoryElement<int> Port { get; set; } + /// Username for authentication. Type: string. + public DataFactoryElement<string> Username { get; set; } + /// Database name for connection. Type: string. + public DataFactoryElement<string> Database { get; set; } /// The Azure key vault secret reference of password in connection string. public DataFactoryKeyVaultSecretReference Password { get; set; } /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string.
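Per the driverVersion doc above, the new property-bag members target the legacy (V1 or empty) driver, while V2 accepts only a connection string. A minimal sketch under that reading, with hypothetical host and credentials and the same assumed usings:

var mariaDb = new MariaDBLinkedService
{
    DriverVersion = DataFactoryElement<string>.FromLiteral("V1"),
    Server = DataFactoryElement<string>.FromLiteral("mariadb.contoso.example"),
    Port = DataFactoryElement<int>.FromLiteral(3306),
    Username = DataFactoryElement<string>.FromLiteral("etl_user"),
    Database = DataFactoryElement<string>.FromLiteral("sales")
    // Password would typically be a DataFactoryKeyVaultSecretReference rather than a literal.
};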
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MySqlLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MySqlLinkedService.Serialization.cs index 204982e82d102..4e665201443b6 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MySqlLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MySqlLinkedService.Serialization.cs @@ -65,8 +65,46 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) } writer.WritePropertyName("typeProperties"u8); writer.WriteStartObject(); - writer.WritePropertyName("connectionString"u8); - JsonSerializer.Serialize(writer, ConnectionString); + if (Optional.IsDefined(DriverVersion)) + { + writer.WritePropertyName("driverVersion"u8); + JsonSerializer.Serialize(writer, DriverVersion); + } + if (Optional.IsDefined(ConnectionString)) + { + writer.WritePropertyName("connectionString"u8); + JsonSerializer.Serialize(writer, ConnectionString); + } + if (Optional.IsDefined(Server)) + { + writer.WritePropertyName("server"u8); + JsonSerializer.Serialize(writer, Server); + } + if (Optional.IsDefined(Port)) + { + writer.WritePropertyName("port"u8); + JsonSerializer.Serialize(writer, Port); + } + if (Optional.IsDefined(Username)) + { + writer.WritePropertyName("username"u8); + JsonSerializer.Serialize(writer, Username); + } + if (Optional.IsDefined(Database)) + { + writer.WritePropertyName("database"u8); + JsonSerializer.Serialize(writer, Database); + } + if (Optional.IsDefined(SslMode)) + { + writer.WritePropertyName("sslMode"u8); + JsonSerializer.Serialize(writer, SslMode); + } + if (Optional.IsDefined(UseSystemTrustStore)) + { + writer.WritePropertyName("useSystemTrustStore"u8); + JsonSerializer.Serialize(writer, UseSystemTrustStore); + } if (Optional.IsDefined(Password)) { writer.WritePropertyName("password"u8); @@ -104,7 +142,14 @@ internal static MySqlLinkedService DeserializeMySqlLinkedService(JsonElement ele Optional<string> description = default; Optional<IDictionary<string, EntityParameterSpecification>> parameters = default; Optional<IList<BinaryData>> annotations = default; - DataFactoryElement<string> connectionString = default; + Optional<DataFactoryElement<string>> driverVersion = default; + Optional<DataFactoryElement<string>> connectionString = default; + Optional<DataFactoryElement<string>> server = default; + Optional<DataFactoryElement<int>> port = default; + Optional<DataFactoryElement<string>> username = default; + Optional<DataFactoryElement<string>> database = default; + Optional<DataFactoryElement<int>> sslMode = default; + Optional<DataFactoryElement<int>> useSystemTrustStore = default; Optional<DataFactoryKeyVaultSecretReference> password = default; Optional<string> encryptedCredential = default; IDictionary<string, BinaryData> additionalProperties = default; @@ -174,11 +219,78 @@ internal static MySqlLinkedService DeserializeMySqlLinkedService(JsonElement ele } foreach (var property0 in property.Value.EnumerateObject()) { + if (property0.NameEquals("driverVersion"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + driverVersion = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } if (property0.NameEquals("connectionString"u8)) { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } connectionString = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); continue; } + if (property0.NameEquals("server"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + server = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("port"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + port = JsonSerializer.Deserialize<DataFactoryElement<int>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("username"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + username = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("database"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + database = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("sslMode"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sslMode = JsonSerializer.Deserialize<DataFactoryElement<int>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("useSystemTrustStore"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + useSystemTrustStore = JsonSerializer.Deserialize<DataFactoryElement<int>>(property0.Value.GetRawText()); + continue; + } if (property0.NameEquals("password"u8)) { if (property0.Value.ValueKind == JsonValueKind.Null) @@ -199,7 +311,7 @@ internal static MySqlLinkedService DeserializeMySqlLinkedService(JsonElement ele additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new MySqlLinkedService(type, connectVia.Value, description.Value, Optional.ToDictionary(parameters), Optional.ToList(annotations), additionalProperties, connectionString, password, encryptedCredential.Value); + return new MySqlLinkedService(type, connectVia.Value, description.Value, Optional.ToDictionary(parameters), Optional.ToList(annotations), additionalProperties, driverVersion.Value, connectionString.Value, server.Value, port.Value, username.Value, database.Value, sslMode.Value, useSystemTrustStore.Value, password, encryptedCredential.Value); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MySqlLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MySqlLinkedService.cs index 7617518f93d1d..70ac6c89948f4 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MySqlLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/MySqlLinkedService.cs @@ -7,7 +7,6 @@ using System; using System.Collections.Generic; -using Azure.Core; using Azure.Core.Expressions.DataFactory; namespace Azure.ResourceManager.DataFactory.Models @@ -16,13 +15,8 @@ namespace Azure.ResourceManager.DataFactory.Models public partial class MySqlLinkedService : DataFactoryLinkedServiceProperties { /// Initializes a new instance of . - /// The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. - /// is null. - public MySqlLinkedService(DataFactoryElement<string> connectionString) + public MySqlLinkedService() { - Argument.AssertNotNull(connectionString, nameof(connectionString)); - - ConnectionString = connectionString; LinkedServiceType = "MySql"; } @@ -33,19 +27,47 @@ public MySqlLinkedService(DataFactoryElement<string> connectionString) /// Parameters for linked service. /// List of tags that can be used for describing the linked service. /// Additional Properties. + /// The version of the MySQL driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can only support connection string. /// The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + /// Server name for connection. Type: string. + /// The port for the connection. Type: integer. + /// Username for authentication. Type: string. + /// Database name for connection. Type: string. + /// SSL mode for connection. Type: integer. 0: disable, 1: prefer, 2: require, 3: verify-ca, 4: verify-full. + /// Use system trust store for connection. Type: integer. 0: enable, 1: disable. /// The Azure key vault secret reference of password in connection string. /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. - internal MySqlLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> connectionString, DataFactoryKeyVaultSecretReference password, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) + internal MySqlLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> driverVersion, DataFactoryElement<string> connectionString, DataFactoryElement<string> server, DataFactoryElement<int> port, DataFactoryElement<string> username, DataFactoryElement<string> database, DataFactoryElement<int> sslMode, DataFactoryElement<int> useSystemTrustStore, DataFactoryKeyVaultSecretReference password, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) { + DriverVersion = driverVersion; ConnectionString = connectionString; + Server = server; + Port = port; + Username = username; + Database = database; + SslMode = sslMode; + UseSystemTrustStore = useSystemTrustStore; Password = password; EncryptedCredential = encryptedCredential; LinkedServiceType = linkedServiceType ?? "MySql"; } + /// The version of the MySQL driver. Type: string. V1 or empty for legacy driver, V2 for new driver. V1 can support connection string and property bag, V2 can only support connection string. + public DataFactoryElement<string> DriverVersion { get; set; } /// The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. public DataFactoryElement<string> ConnectionString { get; set; } + /// Server name for connection. Type: string. + public DataFactoryElement<string> Server { get; set; } + /// The port for the connection. Type: integer. + public DataFactoryElement<int> Port { get; set; } + /// Username for authentication. Type: string. + public DataFactoryElement<string> Username { get; set; } + /// Database name for connection. Type: string. + public DataFactoryElement<string> Database { get; set; } + /// SSL mode for connection. Type: integer. 0: disable, 1: prefer, 2: require, 3: verify-ca, 4: verify-full. + public DataFactoryElement<int> SslMode { get; set; } + /// Use system trust store for connection. Type: integer. 0: enable, 1: disable. + public DataFactoryElement<int> UseSystemTrustStore { get; set; } /// The Azure key vault secret reference of password in connection string. public DataFactoryKeyVaultSecretReference Password { get; set; } /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string.
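MySqlLinkedService drops its required connectionString constructor parameter, so both connection shapes can now be expressed. A minimal sketch (hypothetical values, same assumed usings):

// Property-bag form (legacy/V1 driver), including the new SSL knobs.
var mySql = new MySqlLinkedService
{
    Server = DataFactoryElement<string>.FromLiteral("mysql.contoso.example"),
    Port = DataFactoryElement<int>.FromLiteral(3306),
    Database = DataFactoryElement<string>.FromLiteral("inventory"),
    Username = DataFactoryElement<string>.FromLiteral("etl_user"),
    SslMode = DataFactoryElement<int>.FromLiteral(2),             // 2: require
    UseSystemTrustStore = DataFactoryElement<int>.FromLiteral(0)  // 0: enable
};
// Connection-string form, the only shape supported by the new V2 driver.
var mySqlV2 = new MySqlLinkedService
{
    DriverVersion = DataFactoryElement<string>.FromLiteral("V2"),
    ConnectionString = DataFactoryElement<string>.FromLiteral("Server=mysql.contoso.example;Port=3306;Database=inventory;UID=etl_user;")
};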
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ODataLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ODataLinkedService.Serialization.cs index 2c2108ba71ac6..b8101544c9aae 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ODataLinkedService.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ODataLinkedService.Serialization.cs @@ -163,7 +163,7 @@ internal static ODataLinkedService DeserializeODataLinkedService(JsonElement ele Optional<ODataAuthenticationType> authenticationType = default; Optional<DataFactoryElement<string>> userName = default; Optional<DataFactorySecretBaseDefinition> password = default; - Optional<DataFactoryElement<BinaryData>> authHeaders = default; + Optional<DataFactoryElement<IDictionary<string, string>>> authHeaders = default; Optional<DataFactoryElement<string>> tenant = default; Optional<DataFactoryElement<string>> servicePrincipalId = default; Optional<DataFactoryElement<string>> azureCloudType = default; @@ -278,7 +278,7 @@ internal static ODataLinkedService DeserializeODataLinkedService(JsonElement ele { continue; } - authHeaders = JsonSerializer.Deserialize<DataFactoryElement<BinaryData>>(property0.Value.GetRawText()); + authHeaders = JsonSerializer.Deserialize<DataFactoryElement<IDictionary<string, string>>>(property0.Value.GetRawText()); continue; } if (property0.NameEquals("tenant"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ODataLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ODataLinkedService.cs index bcafa571799de..528cc3fdbe9b4 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ODataLinkedService.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/ODataLinkedService.cs @@ -37,7 +37,7 @@ public ODataLinkedService(DataFactoryElement<string> uri) /// Type of authentication used to connect to the OData service. /// User name of the OData service. Type: string (or Expression with resultType string). /// Password of the OData service. - /// The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). + /// The additional HTTP headers in the request to RESTful API used for authorization. Type: key value pairs (value should be string type). /// Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). /// Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). /// Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). @@ -47,7 +47,7 @@ public ODataLinkedService(DataFactoryElement<string> uri) /// Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). /// Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). /// The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. - internal ODataLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> uri, ODataAuthenticationType? authenticationType, DataFactoryElement<string> userName, DataFactorySecretBaseDefinition password, DataFactoryElement<BinaryData> authHeaders, DataFactoryElement<string> tenant, DataFactoryElement<string> servicePrincipalId, DataFactoryElement<string> azureCloudType, DataFactoryElement<string> aadResourceId, ODataAadServicePrincipalCredentialType? aadServicePrincipalCredentialType, DataFactorySecretBaseDefinition servicePrincipalKey, DataFactorySecretBaseDefinition servicePrincipalEmbeddedCert, DataFactorySecretBaseDefinition servicePrincipalEmbeddedCertPassword, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) + internal ODataLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> uri, ODataAuthenticationType? authenticationType, DataFactoryElement<string> userName, DataFactorySecretBaseDefinition password, DataFactoryElement<IDictionary<string, string>> authHeaders, DataFactoryElement<string> tenant, DataFactoryElement<string> servicePrincipalId, DataFactoryElement<string> azureCloudType, DataFactoryElement<string> aadResourceId, ODataAadServicePrincipalCredentialType? aadServicePrincipalCredentialType, DataFactorySecretBaseDefinition servicePrincipalKey, DataFactorySecretBaseDefinition servicePrincipalEmbeddedCert, DataFactorySecretBaseDefinition servicePrincipalEmbeddedCertPassword, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) { Uri = uri; AuthenticationType = authenticationType; @@ -74,8 +74,8 @@ internal ODataLinkedService(string linkedServiceType, IntegrationRuntimeReferenc public DataFactoryElement<string> UserName { get; set; } /// Password of the OData service. public DataFactorySecretBaseDefinition Password { get; set; } - /// The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType object). - public DataFactoryElement<BinaryData> AuthHeaders { get; set; } + /// The additional HTTP headers in the request to RESTful API used for authorization. Type: key value pairs (value should be string type). + public DataFactoryElement<IDictionary<string, string>> AuthHeaders { get; set; } /// Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). public DataFactoryElement<string> Tenant { get; set; } /// Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string).
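ODataLinkedService picks up the same AuthHeaders retyping; because the value is a DataFactoryElement, an expression that resolves to a string map at run time also works. A minimal sketch (hypothetical service URL and pipeline parameter, same assumed usings):

var odata = new ODataLinkedService(DataFactoryElement<string>.FromLiteral("https://services.odata.org/V4/Northwind/Northwind.svc"))
{
    AuthenticationType = ODataAuthenticationType.Anonymous,
    AuthHeaders = DataFactoryElement<IDictionary<string, string>>.FromExpression("@pipeline().parameters.odataHeaders")
};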
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSink.Serialization.cs index a053f2147f4a0..7a559f6809e63 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSink.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSink.Serialization.cs @@ -48,14 +48,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(HttpCompressionType)) { writer.WritePropertyName("httpCompressionType"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(HttpCompressionType); -#else - using (JsonDocument document = JsonDocument.Parse(HttpCompressionType)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, HttpCompressionType); } writer.WritePropertyName("type"u8); writer.WriteStringValue(CopySinkType); @@ -111,10 +104,10 @@ internal static RestSink DeserializeRestSink(JsonElement element) return null; } Optional> requestMethod = default; - Optional> additionalHeaders = default; + Optional>> additionalHeaders = default; Optional> httpRequestTimeout = default; Optional requestInterval = default; - Optional httpCompressionType = default; + Optional> httpCompressionType = default; string type = default; Optional> writeBatchSize = default; Optional> writeBatchTimeout = default; @@ -141,7 +134,7 @@ internal static RestSink DeserializeRestSink(JsonElement element) { continue; } - additionalHeaders = JsonSerializer.Deserialize>(property.Value.GetRawText()); + additionalHeaders = JsonSerializer.Deserialize>>(property.Value.GetRawText()); continue; } if (property.NameEquals("httpRequestTimeout"u8)) @@ -168,7 +161,7 @@ internal static RestSink DeserializeRestSink(JsonElement element) { continue; } - httpCompressionType = BinaryData.FromString(property.Value.GetRawText()); + httpCompressionType = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } if (property.NameEquals("type"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSink.cs index e72b30d638d30..305162a6b9576 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSink.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSink.cs @@ -30,11 +30,11 @@ public RestSink() /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// Additional Properties. /// The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). - /// The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). + /// The additional HTTP headers in the request to the RESTful API. Type: key value pairs (value should be string type). /// The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). /// The time to await before sending next request, in milliseconds. - /// Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. 
- internal RestSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement requestMethod, DataFactoryElement additionalHeaders, DataFactoryElement httpRequestTimeout, BinaryData requestInterval, BinaryData httpCompressionType) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + /// Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. Type: string (or Expression with resultType string). + internal RestSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement requestMethod, DataFactoryElement> additionalHeaders, DataFactoryElement httpRequestTimeout, BinaryData requestInterval, DataFactoryElement httpCompressionType) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) { RequestMethod = requestMethod; AdditionalHeaders = additionalHeaders; @@ -46,8 +46,8 @@ internal RestSink(string copySinkType, DataFactoryElement writeBatchSize, D /// The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). public DataFactoryElement RequestMethod { get; set; } - /// The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - public DataFactoryElement AdditionalHeaders { get; set; } + /// The additional HTTP headers in the request to the RESTful API. Type: key value pairs (value should be string type). + public DataFactoryElement> AdditionalHeaders { get; set; } /// The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). public DataFactoryElement HttpRequestTimeout { get; set; } /// @@ -81,36 +81,7 @@ internal RestSink(string copySinkType, DataFactoryElement writeBatchSize, D /// /// public BinaryData RequestInterval { get; set; } - /// - /// Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. 
- /// </item> - /// </list> - /// </para> - /// </summary> - public BinaryData HttpCompressionType { get; set; } + /// <summary> Http Compression Type to Send data in compressed format with Optimal Compression Level, Default is None. And The Only Supported option is Gzip. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> HttpCompressionType { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSource.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSource.Serialization.cs index 4262fd568cbcd..03bebec0ef5b7 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSource.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSource.Serialization.cs @@ -58,14 +58,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(AdditionalColumns)) { writer.WritePropertyName("additionalColumns"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(AdditionalColumns); -#else - using (JsonDocument document = JsonDocument.Parse(AdditionalColumns)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, AdditionalColumns); } writer.WritePropertyName("type"u8); writer.WriteStringValue(CopySourceType); @@ -116,7 +109,7 @@ internal static RestSource DeserializeRestSource(JsonElement element) Optional<DataFactoryElement<string>> paginationRules = default; Optional<DataFactoryElement<string>> httpRequestTimeout = default; Optional<BinaryData> requestInterval = default; - Optional<BinaryData> additionalColumns = default; + Optional<DataFactoryElement<IDictionary<string, string>>> additionalColumns = default; string type = default; Optional<DataFactoryElement<int>> sourceRetryCount = default; Optional<DataFactoryElement<string>> sourceRetryWait = default; @@ -186,7 +179,7 @@ internal static RestSource DeserializeRestSource(JsonElement element) { continue; } - additionalColumns = BinaryData.FromString(property.Value.GetRawText()); + additionalColumns = JsonSerializer.Deserialize<DataFactoryElement<IDictionary<string, string>>>(property.Value.GetRawText()); continue; } if (property.NameEquals("type"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSource.cs index d167a0fdfe72e..585ff85922310 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSource.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/RestSource.cs @@ -33,8 +33,8 @@ public RestSource() /// <param name="paginationRules"> The pagination rules to compose next page requests. Type: string (or Expression with resultType string). </param> /// <param name="httpRequestTimeout"> The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). </param> /// <param name="requestInterval"> The time to await before sending next page request. </param> - /// <param name="additionalColumns"> Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). </param>
- internal RestSource(string copySourceType, DataFactoryElement<int> sourceRetryCount, DataFactoryElement<string> sourceRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> requestMethod, DataFactoryElement<string> requestBody, DataFactoryElement<string> additionalHeaders, DataFactoryElement<string> paginationRules, DataFactoryElement<string> httpRequestTimeout, BinaryData requestInterval, BinaryData additionalColumns) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + /// <param name="additionalColumns"> Specifies the additional columns to be added to source data. Type: key value pairs (value should be string type). </param> + internal RestSource(string copySourceType, DataFactoryElement<int> sourceRetryCount, DataFactoryElement<string> sourceRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> requestMethod, DataFactoryElement<string> requestBody, DataFactoryElement<string> additionalHeaders, DataFactoryElement<string> paginationRules, DataFactoryElement<string> httpRequestTimeout, BinaryData requestInterval, DataFactoryElement<IDictionary<string, string>> additionalColumns) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) { RequestMethod = requestMethod; RequestBody = requestBody; @@ -87,36 +87,7 @@ internal RestSource(string copySourceType, DataFactoryElement<int> sourceRetryCo /// </para> /// </summary> public BinaryData RequestInterval { get; set; } - /// <summary> - /// Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). - /// <para> - /// To assign an object to this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>. - /// </para> - /// <para> - /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>. - /// </para> - /// <para> - /// Examples: - /// <list type="bullet"> - /// <item> - /// <term>BinaryData.FromObjectAsJson("foo")</term> - /// <description>Creates a payload of "foo".</description> - /// </item> - /// <item> - /// <term>BinaryData.FromString("\"foo\"")</term> - /// <description>Creates a payload of "foo".</description> - /// </item> - /// <item> - /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term> - /// <description>Creates a payload of { "key": "value" }.</description> - /// </item> - /// <item> - /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term> - /// <description>Creates a payload of { "key": "value" }.</description> - /// </item> - /// </list> - /// </para> - /// </summary> - public BinaryData AdditionalColumns { get; set; } + /// <summary> Specifies the additional columns to be added to source data. Type: key value pairs (value should be string type). </summary> + public DataFactoryElement<IDictionary<string, string>> AdditionalColumns { get; set; } } }
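With this change, RestSink.HttpCompressionType moves from BinaryData to DataFactoryElement<string>, and the additional headers/columns become dictionary-typed elements, so callers no longer hand-encode raw JSON. A minimal usage sketch (not part of this diff; the pipeline parameter name is hypothetical, and it assumes the implicit T-to-DataFactoryElement<T> conversion and DataFactoryElement<T>.FromExpression from Azure.Core.Expressions.DataFactory):

```csharp
using System.Collections.Generic;
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var sink = new RestSink
{
    RequestMethod = "POST",       // implicit string -> DataFactoryElement<string>
    HttpCompressionType = "Gzip", // previously required BinaryData.FromString("\"Gzip\"")
    // Headers are now a dictionary-typed element; an ADF expression still works:
    AdditionalHeaders = DataFactoryElement<IDictionary<string, string>>.FromExpression(
        "@pipeline().parameters.headers") // hypothetical pipeline parameter
};
```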
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2LinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2LinkedService.Serialization.cs new file mode 100644 index 0000000000000..78c333419e985 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2LinkedService.Serialization.cs @@ -0,0 +1,242 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// <auto-generated/> + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + public partial class SalesforceServiceCloudV2LinkedService : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(LinkedServiceType); + if (Optional.IsDefined(ConnectVia)) + { + writer.WritePropertyName("connectVia"u8); + writer.WriteObjectValue(ConnectVia); + } + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (Optional.IsCollectionDefined(Parameters)) + { + writer.WritePropertyName("parameters"u8); + writer.WriteStartObject(); + foreach (var item in Parameters) + { + writer.WritePropertyName(item.Key); + writer.WriteObjectValue(item.Value); + } + writer.WriteEndObject(); + } + if (Optional.IsCollectionDefined(Annotations)) + { + writer.WritePropertyName("annotations"u8); + writer.WriteStartArray(); + foreach (var item in Annotations) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } +#if NET6_0_OR_GREATER + writer.WriteRawValue(item); +#else + using (JsonDocument document = JsonDocument.Parse(item)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndArray(); + } + writer.WritePropertyName("typeProperties"u8); + writer.WriteStartObject(); + if (Optional.IsDefined(EnvironmentUri)) + { + writer.WritePropertyName("environmentUrl"u8); + JsonSerializer.Serialize(writer, EnvironmentUri); + } + if (Optional.IsDefined(ClientId)) + { + writer.WritePropertyName("clientId"u8); + JsonSerializer.Serialize(writer, ClientId); + } + if (Optional.IsDefined(ClientSecret)) + { + writer.WritePropertyName("clientSecret"u8); + JsonSerializer.Serialize(writer, ClientSecret); + } + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("apiVersion"u8); + JsonSerializer.Serialize(writer, ApiVersion); + } + if (Optional.IsDefined(EncryptedCredential)) + { + writer.WritePropertyName("encryptedCredential"u8); + writer.WriteStringValue(EncryptedCredential); + } + writer.WriteEndObject(); + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndObject(); + } + + internal static SalesforceServiceCloudV2LinkedService DeserializeSalesforceServiceCloudV2LinkedService(JsonElement element) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string type = default; + Optional<IntegrationRuntimeReference> connectVia = default; + Optional<string> description = default; + Optional<IDictionary<string, EntityParameterSpecification>> parameters = default; + Optional<IList<BinaryData>> annotations = default; + Optional<DataFactoryElement<string>> environmentUrl = default; + Optional<DataFactoryElement<string>> clientId = default; + Optional<DataFactorySecretBaseDefinition> clientSecret = default; + Optional<DataFactoryElement<string>> apiVersion = default; + Optional<string> encryptedCredential = default; + IDictionary<string, BinaryData> additionalProperties = default; + Dictionary<string, BinaryData> additionalPropertiesDictionary = new Dictionary<string, BinaryData>(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (property.NameEquals("connectVia"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + {
+ continue; + } + connectVia = IntegrationRuntimeReference.DeserializeIntegrationRuntimeReference(property.Value); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("parameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary<string, EntityParameterSpecification> dictionary = new Dictionary<string, EntityParameterSpecification>(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, EntityParameterSpecification.DeserializeEntityParameterSpecification(property0.Value)); + } + parameters = dictionary; + continue; + } + if (property.NameEquals("annotations"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List<BinaryData> array = new List<BinaryData>(); + foreach (var item in property.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(BinaryData.FromString(item.GetRawText())); + } + } + annotations = array; + continue; + } + if (property.NameEquals("typeProperties"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.NameEquals("environmentUrl"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + environmentUrl = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("clientId"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + clientId = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("clientSecret"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + clientSecret = JsonSerializer.Deserialize<DataFactorySecretBaseDefinition>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("apiVersion"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + apiVersion = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("encryptedCredential"u8)) + { + encryptedCredential = property0.Value.GetString(); + continue; + } + } + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new SalesforceServiceCloudV2LinkedService(type, connectVia.Value, description.Value, Optional.ToDictionary(parameters), Optional.ToList(annotations), additionalProperties, environmentUrl.Value, clientId.Value, clientSecret, apiVersion.Value, encryptedCredential.Value); + } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2LinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2LinkedService.cs new file mode 100644 index 0000000000000..d32cf6cedf35e --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2LinkedService.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// <auto-generated/> + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// <summary> Linked service for Salesforce Service Cloud V2. </summary>
+ public partial class SalesforceServiceCloudV2LinkedService : DataFactoryLinkedServiceProperties + { + /// <summary> Initializes a new instance of <see cref="SalesforceServiceCloudV2LinkedService"/>. </summary> + public SalesforceServiceCloudV2LinkedService() + { + LinkedServiceType = "SalesforceServiceCloudV2"; + } + + /// <summary> Initializes a new instance of <see cref="SalesforceServiceCloudV2LinkedService"/>. </summary> + /// <param name="linkedServiceType"> Type of linked service. </param> + /// <param name="connectVia"> The integration runtime reference. </param> + /// <param name="description"> Linked service description. </param> + /// <param name="parameters"> Parameters for linked service. </param> + /// <param name="annotations"> List of tags that can be used for describing the linked service. </param> + /// <param name="additionalProperties"> Additional Properties. </param> + /// <param name="environmentUri"> The URL of Salesforce Service Cloud instance. For example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). </param> + /// <param name="clientId"> The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or Expression with resultType string). </param> + /// <param name="clientSecret"> The client secret for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. </param> + /// <param name="apiVersion"> The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with resultType string). </param> + /// <param name="encryptedCredential"> The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. </param> + internal SalesforceServiceCloudV2LinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> environmentUri, DataFactoryElement<string> clientId, DataFactorySecretBaseDefinition clientSecret, DataFactoryElement<string> apiVersion, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) + { + EnvironmentUri = environmentUri; + ClientId = clientId; + ClientSecret = clientSecret; + ApiVersion = apiVersion; + EncryptedCredential = encryptedCredential; + LinkedServiceType = linkedServiceType ?? "SalesforceServiceCloudV2"; + } + + /// <summary> The URL of Salesforce Service Cloud instance. For example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> EnvironmentUri { get; set; } + /// <summary> The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> ClientId { get; set; } + /// <summary> The client secret for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. </summary> + public DataFactorySecretBaseDefinition ClientSecret { get; set; } + /// <summary> The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> ApiVersion { get; set; } + /// <summary> The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. </summary>
+ public string EncryptedCredential { get; set; } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2ObjectDataset.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2ObjectDataset.Serialization.cs new file mode 100644 index 0000000000000..bc4d3e88a31c4 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2ObjectDataset.Serialization.cs @@ -0,0 +1,239 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + public partial class SalesforceServiceCloudV2ObjectDataset : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(DatasetType); + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (Optional.IsDefined(Structure)) + { + writer.WritePropertyName("structure"u8); + JsonSerializer.Serialize(writer, Structure); + } + if (Optional.IsDefined(Schema)) + { + writer.WritePropertyName("schema"u8); + JsonSerializer.Serialize(writer, Schema); + } + writer.WritePropertyName("linkedServiceName"u8); + JsonSerializer.Serialize(writer, LinkedServiceName); + if (Optional.IsCollectionDefined(Parameters)) + { + writer.WritePropertyName("parameters"u8); + writer.WriteStartObject(); + foreach (var item in Parameters) + { + writer.WritePropertyName(item.Key); + writer.WriteObjectValue(item.Value); + } + writer.WriteEndObject(); + } + if (Optional.IsCollectionDefined(Annotations)) + { + writer.WritePropertyName("annotations"u8); + writer.WriteStartArray(); + foreach (var item in Annotations) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } +#if NET6_0_OR_GREATER + writer.WriteRawValue(item); +#else + using (JsonDocument document = JsonDocument.Parse(item)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(Folder)) + { + writer.WritePropertyName("folder"u8); + writer.WriteObjectValue(Folder); + } + writer.WritePropertyName("typeProperties"u8); + writer.WriteStartObject(); + if (Optional.IsDefined(ObjectApiName)) + { + writer.WritePropertyName("objectApiName"u8); + JsonSerializer.Serialize(writer, ObjectApiName); + } + if (Optional.IsDefined(ReportId)) + { + writer.WritePropertyName("reportId"u8); + JsonSerializer.Serialize(writer, ReportId); + } + writer.WriteEndObject(); + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndObject(); + } + + internal static SalesforceServiceCloudV2ObjectDataset DeserializeSalesforceServiceCloudV2ObjectDataset(JsonElement element) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string type = default; + Optional description = default; + Optional>> structure = default; + Optional>> schema = default; + DataFactoryLinkedServiceReference 
linkedServiceName = default; + Optional> parameters = default; + Optional> annotations = default; + Optional folder = default; + Optional> objectApiName = default; + Optional> reportId = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("structure"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + structure = JsonSerializer.Deserialize>>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("schema"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + schema = JsonSerializer.Deserialize>>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("linkedServiceName"u8)) + { + linkedServiceName = JsonSerializer.Deserialize(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("parameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, EntityParameterSpecification.DeserializeEntityParameterSpecification(property0.Value)); + } + parameters = dictionary; + continue; + } + if (property.NameEquals("annotations"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(BinaryData.FromString(item.GetRawText())); + } + } + annotations = array; + continue; + } + if (property.NameEquals("folder"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + folder = DatasetFolder.DeserializeDatasetFolder(property.Value); + continue; + } + if (property.NameEquals("typeProperties"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.NameEquals("objectApiName"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + objectApiName = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("reportId"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + reportId = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + } + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new SalesforceServiceCloudV2ObjectDataset(type, description.Value, structure.Value, schema.Value, linkedServiceName, Optional.ToDictionary(parameters), Optional.ToList(annotations), folder.Value, additionalProperties, objectApiName.Value, reportId.Value); + } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2ObjectDataset.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2ObjectDataset.cs new file mode 100644 index 0000000000000..8039512cfd3b7 --- /dev/null 
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2ObjectDataset.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// <auto-generated/> + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// <summary> The Salesforce Service Cloud V2 object dataset. </summary> + public partial class SalesforceServiceCloudV2ObjectDataset : DataFactoryDatasetProperties + { + /// <summary> Initializes a new instance of <see cref="SalesforceServiceCloudV2ObjectDataset"/>. </summary> + /// <param name="linkedServiceName"> Linked service reference. </param> + /// <exception cref="ArgumentNullException"> <paramref name="linkedServiceName"/> is null. </exception> + public SalesforceServiceCloudV2ObjectDataset(DataFactoryLinkedServiceReference linkedServiceName) : base(linkedServiceName) + { + Argument.AssertNotNull(linkedServiceName, nameof(linkedServiceName)); + + DatasetType = "SalesforceServiceCloudV2Object"; + } + + /// <summary> Initializes a new instance of <see cref="SalesforceServiceCloudV2ObjectDataset"/>. </summary> + /// <param name="datasetType"> Type of dataset. </param> + /// <param name="description"> Dataset description. </param> + /// <param name="structure"> Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. </param> + /// <param name="schema"> Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. </param> + /// <param name="linkedServiceName"> Linked service reference. </param> + /// <param name="parameters"> Parameters for dataset. </param> + /// <param name="annotations"> List of tags that can be used for describing the Dataset. </param> + /// <param name="folder"> The folder that this Dataset is in. If not specified, Dataset will appear at the root level. </param> + /// <param name="additionalProperties"> Additional Properties. </param> + /// <param name="objectApiName"> The Salesforce Service Cloud V2 object API name. Type: string (or Expression with resultType string). </param> + /// <param name="reportId"> The Salesforce Service Cloud V2 reportId. Type: string (or Expression with resultType string). </param> + internal SalesforceServiceCloudV2ObjectDataset(string datasetType, string description, DataFactoryElement<IList<DatasetDataElement>> structure, DataFactoryElement<IList<DatasetSchemaDataElement>> schema, DataFactoryLinkedServiceReference linkedServiceName, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, DatasetFolder folder, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> objectApiName, DataFactoryElement<string> reportId) : base(datasetType, description, structure, schema, linkedServiceName, parameters, annotations, folder, additionalProperties) + { + ObjectApiName = objectApiName; + ReportId = reportId; + DatasetType = datasetType ?? "SalesforceServiceCloudV2Object"; + } + + /// <summary> The Salesforce Service Cloud V2 object API name. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> ObjectApiName { get; set; } + /// <summary> The Salesforce Service Cloud V2 reportId. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> ReportId { get; set; } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Sink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Sink.Serialization.cs new file mode 100644 index 0000000000000..67c11ab998655 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Sink.Serialization.cs @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
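The new dataset model follows the existing Salesforce object dataset shape. A hedged construction sketch (not from this PR; the linked service reference name is a placeholder):

```csharp
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var dataset = new SalesforceServiceCloudV2ObjectDataset(
    new DataFactoryLinkedServiceReference(
        DataFactoryLinkedServiceReferenceType.LinkedServiceReference,
        "SalesforceServiceCloudV2LinkedService1")) // placeholder name
{
    ObjectApiName = "Account", // implicit string -> DataFactoryElement<string>
    ReportId = DataFactoryElement<string>.FromExpression("@pipeline().parameters.reportId")
};
```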
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + public partial class SalesforceServiceCloudV2Sink : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(WriteBehavior)) + { + writer.WritePropertyName("writeBehavior"u8); + writer.WriteStringValue(WriteBehavior.Value.ToString()); + } + if (Optional.IsDefined(ExternalIdFieldName)) + { + writer.WritePropertyName("externalIdFieldName"u8); + JsonSerializer.Serialize(writer, ExternalIdFieldName); + } + if (Optional.IsDefined(IgnoreNullValues)) + { + writer.WritePropertyName("ignoreNullValues"u8); + JsonSerializer.Serialize(writer, IgnoreNullValues); + } + writer.WritePropertyName("type"u8); + writer.WriteStringValue(CopySinkType); + if (Optional.IsDefined(WriteBatchSize)) + { + writer.WritePropertyName("writeBatchSize"u8); + JsonSerializer.Serialize(writer, WriteBatchSize); + } + if (Optional.IsDefined(WriteBatchTimeout)) + { + writer.WritePropertyName("writeBatchTimeout"u8); + JsonSerializer.Serialize(writer, WriteBatchTimeout); + } + if (Optional.IsDefined(SinkRetryCount)) + { + writer.WritePropertyName("sinkRetryCount"u8); + JsonSerializer.Serialize(writer, SinkRetryCount); + } + if (Optional.IsDefined(SinkRetryWait)) + { + writer.WritePropertyName("sinkRetryWait"u8); + JsonSerializer.Serialize(writer, SinkRetryWait); + } + if (Optional.IsDefined(MaxConcurrentConnections)) + { + writer.WritePropertyName("maxConcurrentConnections"u8); + JsonSerializer.Serialize(writer, MaxConcurrentConnections); + } + if (Optional.IsDefined(DisableMetricsCollection)) + { + writer.WritePropertyName("disableMetricsCollection"u8); + JsonSerializer.Serialize(writer, DisableMetricsCollection); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndObject(); + } + + internal static SalesforceServiceCloudV2Sink DeserializeSalesforceServiceCloudV2Sink(JsonElement element) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Optional writeBehavior = default; + Optional> externalIdFieldName = default; + Optional> ignoreNullValues = default; + string type = default; + Optional> writeBatchSize = default; + Optional> writeBatchTimeout = default; + Optional> sinkRetryCount = default; + Optional> sinkRetryWait = default; + Optional> maxConcurrentConnections = default; + Optional> disableMetricsCollection = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("writeBehavior"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + writeBehavior = new SalesforceV2SinkWriteBehavior(property.Value.GetString()); + continue; + } + if (property.NameEquals("externalIdFieldName"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + externalIdFieldName = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("ignoreNullValues"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + 
continue; + } + ignoreNullValues = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (property.NameEquals("writeBatchSize"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + writeBatchSize = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("writeBatchTimeout"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + writeBatchTimeout = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("sinkRetryCount"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sinkRetryCount = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("sinkRetryWait"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sinkRetryWait = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("maxConcurrentConnections"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxConcurrentConnections = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("disableMetricsCollection"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + disableMetricsCollection = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new SalesforceServiceCloudV2Sink(type, writeBatchSize.Value, writeBatchTimeout.Value, sinkRetryCount.Value, sinkRetryWait.Value, maxConcurrentConnections.Value, disableMetricsCollection.Value, additionalProperties, Optional.ToNullable(writeBehavior), externalIdFieldName.Value, ignoreNullValues.Value); + } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Sink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Sink.cs new file mode 100644 index 0000000000000..5754db0a2253c --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Sink.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// A copy activity Salesforce Service Cloud V2 sink. + public partial class SalesforceServiceCloudV2Sink : CopySink + { + /// Initializes a new instance of . + public SalesforceServiceCloudV2Sink() + { + CopySinkType = "SalesforceServiceCloudV2Sink"; + } + + /// Initializes a new instance of . + /// Copy sink type. + /// Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. + /// Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// Sink retry count. Type: integer (or Expression with resultType integer). + /// Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ /// <param name="maxConcurrentConnections"> The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). </param> + /// <param name="disableMetricsCollection"> If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). </param> + /// <param name="additionalProperties"> Additional Properties. </param> + /// <param name="writeBehavior"> The write behavior for the operation. Default is Insert. </param> + /// <param name="externalIdFieldName"> The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). </param> + /// <param name="ignoreNullValues"> The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). </param> + internal SalesforceServiceCloudV2Sink(string copySinkType, DataFactoryElement<int> writeBatchSize, DataFactoryElement<string> writeBatchTimeout, DataFactoryElement<int> sinkRetryCount, DataFactoryElement<string> sinkRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, SalesforceV2SinkWriteBehavior? writeBehavior, DataFactoryElement<string> externalIdFieldName, DataFactoryElement<bool> ignoreNullValues) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + { + WriteBehavior = writeBehavior; + ExternalIdFieldName = externalIdFieldName; + IgnoreNullValues = ignoreNullValues; + CopySinkType = copySinkType ?? "SalesforceServiceCloudV2Sink"; + } + + /// <summary> The write behavior for the operation. Default is Insert. </summary> + public SalesforceV2SinkWriteBehavior? WriteBehavior { get; set; } + /// <summary> The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> ExternalIdFieldName { get; set; } + /// <summary> The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). </summary> + public DataFactoryElement<bool> IgnoreNullValues { get; set; } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Source.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Source.Serialization.cs new file mode 100644 index 0000000000000..38f60475f1be5 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Source.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
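Per the doc comments above, WriteBehavior defaults to Insert and ExternalIdFieldName only matters for upserts. A minimal sketch of an upsert configuration (not from this PR; the Salesforce field name is a placeholder):

```csharp
using Azure.ResourceManager.DataFactory.Models;

var sink = new SalesforceServiceCloudV2Sink
{
    WriteBehavior = SalesforceV2SinkWriteBehavior.Upsert, // default is Insert
    ExternalIdFieldName = "External_Id__c",               // placeholder; default is the 'Id' column
    IgnoreNullValues = false // false: source nulls overwrite destination values
};
```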
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + public partial class SalesforceServiceCloudV2Source : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(SoqlQuery)) + { + writer.WritePropertyName("SOQLQuery"u8); + JsonSerializer.Serialize(writer, SoqlQuery); + } + if (Optional.IsDefined(ReadBehavior)) + { + writer.WritePropertyName("readBehavior"u8); + JsonSerializer.Serialize(writer, ReadBehavior); + } + if (Optional.IsDefined(AdditionalColumns)) + { + writer.WritePropertyName("additionalColumns"u8); +#if NET6_0_OR_GREATER + writer.WriteRawValue(AdditionalColumns); +#else + using (JsonDocument document = JsonDocument.Parse(AdditionalColumns)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WritePropertyName("type"u8); + writer.WriteStringValue(CopySourceType); + if (Optional.IsDefined(SourceRetryCount)) + { + writer.WritePropertyName("sourceRetryCount"u8); + JsonSerializer.Serialize(writer, SourceRetryCount); + } + if (Optional.IsDefined(SourceRetryWait)) + { + writer.WritePropertyName("sourceRetryWait"u8); + JsonSerializer.Serialize(writer, SourceRetryWait); + } + if (Optional.IsDefined(MaxConcurrentConnections)) + { + writer.WritePropertyName("maxConcurrentConnections"u8); + JsonSerializer.Serialize(writer, MaxConcurrentConnections); + } + if (Optional.IsDefined(DisableMetricsCollection)) + { + writer.WritePropertyName("disableMetricsCollection"u8); + JsonSerializer.Serialize(writer, DisableMetricsCollection); + } + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndObject(); + } + + internal static SalesforceServiceCloudV2Source DeserializeSalesforceServiceCloudV2Source(JsonElement element) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Optional> soqlQuery = default; + Optional> readBehavior = default; + Optional additionalColumns = default; + string type = default; + Optional> sourceRetryCount = default; + Optional> sourceRetryWait = default; + Optional> maxConcurrentConnections = default; + Optional> disableMetricsCollection = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("SOQLQuery"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + soqlQuery = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("readBehavior"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + readBehavior = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("additionalColumns"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + additionalColumns = BinaryData.FromString(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (property.NameEquals("sourceRetryCount"u8)) + { + if 
(property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sourceRetryCount = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("sourceRetryWait"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sourceRetryWait = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("maxConcurrentConnections"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxConcurrentConnections = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + if (property.NameEquals("disableMetricsCollection"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + disableMetricsCollection = JsonSerializer.Deserialize>(property.Value.GetRawText()); + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new SalesforceServiceCloudV2Source(type, sourceRetryCount.Value, sourceRetryWait.Value, maxConcurrentConnections.Value, disableMetricsCollection.Value, additionalProperties, soqlQuery.Value, readBehavior.Value, additionalColumns.Value); + } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Source.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Source.cs new file mode 100644 index 0000000000000..cbcabadaad841 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceServiceCloudV2Source.cs @@ -0,0 +1,77 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// A copy activity Salesforce Service Cloud V2 source. + public partial class SalesforceServiceCloudV2Source : CopyActivitySource + { + /// Initializes a new instance of . + public SalesforceServiceCloudV2Source() + { + CopySourceType = "SalesforceServiceCloudV2Source"; + } + + /// Initializes a new instance of . + /// Copy source type. + /// Source retry count. Type: integer (or Expression with resultType integer). + /// Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + /// The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). + /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). + /// Additional Properties. + /// Database query. Type: string (or Expression with resultType string). + /// The read behavior for the operation. Default is query. Allowed values: query/queryAll. Type: string (or Expression with resultType string). + /// Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). 
+ internal SalesforceServiceCloudV2Source(string copySourceType, DataFactoryElement<int> sourceRetryCount, DataFactoryElement<string> sourceRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> soqlQuery, DataFactoryElement<string> readBehavior, BinaryData additionalColumns) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + { + SoqlQuery = soqlQuery; + ReadBehavior = readBehavior; + AdditionalColumns = additionalColumns; + CopySourceType = copySourceType ?? "SalesforceServiceCloudV2Source"; + } + + /// <summary> Database query. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> SoqlQuery { get; set; } + /// <summary> The read behavior for the operation. Default is query. Allowed values: query/queryAll. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> ReadBehavior { get; set; } + /// <summary> + /// Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). + /// <para> + /// To assign an object to this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>. + /// </para> + /// <para> + /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>. + /// </para> + /// <para> + /// Examples: + /// <list type="bullet"> + /// <item> + /// <term>BinaryData.FromObjectAsJson("foo")</term> + /// <description>Creates a payload of "foo".</description> + /// </item> + /// <item> + /// <term>BinaryData.FromString("\"foo\"")</term> + /// <description>Creates a payload of "foo".</description> + /// </item> + /// <item> + /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term> + /// <description>Creates a payload of { "key": "value" }.</description> + /// </item> + /// <item> + /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term> + /// <description>Creates a payload of { "key": "value" }.</description> + /// </item> + /// </list> + /// </para> + /// </summary> + public BinaryData AdditionalColumns { get; set; } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2LinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2LinkedService.Serialization.cs new file mode 100644 index 0000000000000..c5dc7b4cefb20 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2LinkedService.Serialization.cs @@ -0,0 +1,242 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
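Note the source serializes SoqlQuery under the "SOQLQuery" property name. A minimal sketch (not from this PR) of a queryAll read:

```csharp
using Azure.ResourceManager.DataFactory.Models;

var source = new SalesforceServiceCloudV2Source
{
    SoqlQuery = "SELECT Id, CaseNumber, Status FROM Case", // written as "SOQLQuery"
    ReadBehavior = "queryAll" // per the doc comment: query (default) or queryAll
};
```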
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + public partial class SalesforceV2LinkedService : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("type"u8); + writer.WriteStringValue(LinkedServiceType); + if (Optional.IsDefined(ConnectVia)) + { + writer.WritePropertyName("connectVia"u8); + writer.WriteObjectValue(ConnectVia); + } + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"u8); + writer.WriteStringValue(Description); + } + if (Optional.IsCollectionDefined(Parameters)) + { + writer.WritePropertyName("parameters"u8); + writer.WriteStartObject(); + foreach (var item in Parameters) + { + writer.WritePropertyName(item.Key); + writer.WriteObjectValue(item.Value); + } + writer.WriteEndObject(); + } + if (Optional.IsCollectionDefined(Annotations)) + { + writer.WritePropertyName("annotations"u8); + writer.WriteStartArray(); + foreach (var item in Annotations) + { + if (item == null) + { + writer.WriteNullValue(); + continue; + } +#if NET6_0_OR_GREATER + writer.WriteRawValue(item); +#else + using (JsonDocument document = JsonDocument.Parse(item)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndArray(); + } + writer.WritePropertyName("typeProperties"u8); + writer.WriteStartObject(); + if (Optional.IsDefined(EnvironmentUri)) + { + writer.WritePropertyName("environmentUrl"u8); + JsonSerializer.Serialize(writer, EnvironmentUri); + } + if (Optional.IsDefined(ClientId)) + { + writer.WritePropertyName("clientId"u8); + JsonSerializer.Serialize(writer, ClientId); + } + if (Optional.IsDefined(ClientSecret)) + { + writer.WritePropertyName("clientSecret"u8); + JsonSerializer.Serialize(writer, ClientSecret); + } + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("apiVersion"u8); + JsonSerializer.Serialize(writer, ApiVersion); + } + if (Optional.IsDefined(EncryptedCredential)) + { + writer.WritePropertyName("encryptedCredential"u8); + writer.WriteStringValue(EncryptedCredential); + } + writer.WriteEndObject(); + foreach (var item in AdditionalProperties) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + writer.WriteEndObject(); + } + + internal static SalesforceV2LinkedService DeserializeSalesforceV2LinkedService(JsonElement element) + { + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string type = default; + Optional connectVia = default; + Optional description = default; + Optional> parameters = default; + Optional> annotations = default; + Optional> environmentUrl = default; + Optional> clientId = default; + Optional clientSecret = default; + Optional> apiVersion = default; + Optional encryptedCredential = default; + IDictionary additionalProperties = default; + Dictionary additionalPropertiesDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("type"u8)) + { + type = property.Value.GetString(); + continue; + } + if (property.NameEquals("connectVia"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + connectVia = 
IntegrationRuntimeReference.DeserializeIntegrationRuntimeReference(property.Value); + continue; + } + if (property.NameEquals("description"u8)) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("parameters"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + Dictionary dictionary = new Dictionary(); + foreach (var property0 in property.Value.EnumerateObject()) + { + dictionary.Add(property0.Name, EntityParameterSpecification.DeserializeEntityParameterSpecification(property0.Value)); + } + parameters = dictionary; + continue; + } + if (property.NameEquals("annotations"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + if (item.ValueKind == JsonValueKind.Null) + { + array.Add(null); + } + else + { + array.Add(BinaryData.FromString(item.GetRawText())); + } + } + annotations = array; + continue; + } + if (property.NameEquals("typeProperties"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + foreach (var property0 in property.Value.EnumerateObject()) + { + if (property0.NameEquals("environmentUrl"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + environmentUrl = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("clientId"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + clientId = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("clientSecret"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + clientSecret = JsonSerializer.Deserialize(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("apiVersion"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + apiVersion = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("encryptedCredential"u8)) + { + encryptedCredential = property0.Value.GetString(); + continue; + } + } + continue; + } + additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + additionalProperties = additionalPropertiesDictionary; + return new SalesforceV2LinkedService(type, connectVia.Value, description.Value, Optional.ToDictionary(parameters), Optional.ToList(annotations), additionalProperties, environmentUrl.Value, clientId.Value, clientSecret, apiVersion.Value, encryptedCredential.Value); + } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2LinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2LinkedService.cs new file mode 100644 index 0000000000000..e358ae3ceb55a --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2LinkedService.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using Azure.Core.Expressions.DataFactory; + +namespace Azure.ResourceManager.DataFactory.Models +{ + /// Linked service for Salesforce V2. + public partial class SalesforceV2LinkedService : DataFactoryLinkedServiceProperties + { + /// Initializes a new instance of . 
+ public SalesforceV2LinkedService() + { + LinkedServiceType = "SalesforceV2"; + } + + /// <summary> Initializes a new instance of <see cref="SalesforceV2LinkedService"/>. </summary> + /// <param name="linkedServiceType"> Type of linked service. </param> + /// <param name="connectVia"> The integration runtime reference. </param> + /// <param name="description"> Linked service description. </param> + /// <param name="parameters"> Parameters for linked service. </param> + /// <param name="annotations"> List of tags that can be used for describing the linked service. </param> + /// <param name="additionalProperties"> Additional Properties. </param> + /// <param name="environmentUri"> The URL of Salesforce instance. For example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). </param> + /// <param name="clientId"> The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or Expression with resultType string). </param> + /// <param name="clientSecret"> The client secret for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. </param> + /// <param name="apiVersion"> The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with resultType string). </param> + /// <param name="encryptedCredential"> The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. </param> + internal SalesforceV2LinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> environmentUri, DataFactoryElement<string> clientId, DataFactorySecretBaseDefinition clientSecret, DataFactoryElement<string> apiVersion, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties) + { + EnvironmentUri = environmentUri; + ClientId = clientId; + ClientSecret = clientSecret; + ApiVersion = apiVersion; + EncryptedCredential = encryptedCredential; + LinkedServiceType = linkedServiceType ?? "SalesforceV2"; + } + + /// <summary> The URL of Salesforce instance. For example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> EnvironmentUri { get; set; } + /// <summary> The client Id for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> ClientId { get; set; } + /// <summary> The client secret for OAuth 2.0 Client Credentials Flow authentication of the Salesforce instance. </summary> + public DataFactorySecretBaseDefinition ClientSecret { get; set; } + /// <summary> The Salesforce API version used in ADF. The version must be larger than or equal to 47.0 which is required by Salesforce BULK API 2.0. Type: string (or Expression with resultType string). </summary> + public DataFactoryElement<string> ApiVersion { get; set; } + /// <summary> The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. </summary> + public string EncryptedCredential { get; set; } + } +} diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2ObjectDataset.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2ObjectDataset.Serialization.cs new file mode 100644 index 0000000000000..6e7a066cafa14 --- /dev/null +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2ObjectDataset.Serialization.cs @@ -0,0 +1,239 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
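A hedged sketch of wiring the OAuth 2.0 client-credentials properties (not from this PR; all values are placeholders, and it assumes the DataFactorySecretString helper from Azure.Core.Expressions.DataFactory):

```csharp
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

var linkedService = new SalesforceV2LinkedService
{
    EnvironmentUri = "https://contoso.my.salesforce.com", // placeholder domain
    ClientId = "placeholder-client-id",
    ClientSecret = new DataFactorySecretString("placeholder-client-secret"),
    ApiVersion = "47.0" // minimum version required by Salesforce BULK API 2.0
};
```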
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+using Azure.Core.Expressions.DataFactory;
+
+namespace Azure.ResourceManager.DataFactory.Models
+{
+    public partial class SalesforceV2ObjectDataset : IUtf8JsonSerializable
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
+        {
+            writer.WriteStartObject();
+            writer.WritePropertyName("type"u8);
+            writer.WriteStringValue(DatasetType);
+            if (Optional.IsDefined(Description))
+            {
+                writer.WritePropertyName("description"u8);
+                writer.WriteStringValue(Description);
+            }
+            if (Optional.IsDefined(Structure))
+            {
+                writer.WritePropertyName("structure"u8);
+                JsonSerializer.Serialize(writer, Structure);
+            }
+            if (Optional.IsDefined(Schema))
+            {
+                writer.WritePropertyName("schema"u8);
+                JsonSerializer.Serialize(writer, Schema);
+            }
+            writer.WritePropertyName("linkedServiceName"u8);
+            JsonSerializer.Serialize(writer, LinkedServiceName);
+            if (Optional.IsCollectionDefined(Parameters))
+            {
+                writer.WritePropertyName("parameters"u8);
+                writer.WriteStartObject();
+                foreach (var item in Parameters)
+                {
+                    writer.WritePropertyName(item.Key);
+                    writer.WriteObjectValue(item.Value);
+                }
+                writer.WriteEndObject();
+            }
+            if (Optional.IsCollectionDefined(Annotations))
+            {
+                writer.WritePropertyName("annotations"u8);
+                writer.WriteStartArray();
+                foreach (var item in Annotations)
+                {
+                    if (item == null)
+                    {
+                        writer.WriteNullValue();
+                        continue;
+                    }
+#if NET6_0_OR_GREATER
+                    writer.WriteRawValue(item);
+#else
+                    using (JsonDocument document = JsonDocument.Parse(item))
+                    {
+                        JsonSerializer.Serialize(writer, document.RootElement);
+                    }
+#endif
+                }
+                writer.WriteEndArray();
+            }
+            if (Optional.IsDefined(Folder))
+            {
+                writer.WritePropertyName("folder"u8);
+                writer.WriteObjectValue(Folder);
+            }
+            writer.WritePropertyName("typeProperties"u8);
+            writer.WriteStartObject();
+            if (Optional.IsDefined(ObjectApiName))
+            {
+                writer.WritePropertyName("objectApiName"u8);
+                JsonSerializer.Serialize(writer, ObjectApiName);
+            }
+            if (Optional.IsDefined(ReportId))
+            {
+                writer.WritePropertyName("reportId"u8);
+                JsonSerializer.Serialize(writer, ReportId);
+            }
+            writer.WriteEndObject();
+            foreach (var item in AdditionalProperties)
+            {
+                writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                writer.WriteRawValue(item.Value);
+#else
+                using (JsonDocument document = JsonDocument.Parse(item.Value))
+                {
+                    JsonSerializer.Serialize(writer, document.RootElement);
+                }
+#endif
+            }
+            writer.WriteEndObject();
+        }
+
+        internal static SalesforceV2ObjectDataset DeserializeSalesforceV2ObjectDataset(JsonElement element)
+        {
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            string type = default;
+            Optional<string> description = default;
+            Optional<DataFactoryElement<IList<DatasetDataElement>>> structure = default;
+            Optional<DataFactoryElement<IList<DatasetSchemaDataElement>>> schema = default;
+            DataFactoryLinkedServiceReference linkedServiceName = default;
+            Optional<IDictionary<string, EntityParameterSpecification>> parameters = default;
+            Optional<IList<BinaryData>> annotations = default;
+            Optional<DatasetFolder> folder = default;
+            Optional<DataFactoryElement<string>> objectApiName = default;
+            Optional<DataFactoryElement<string>> reportId = default;
+            IDictionary<string, BinaryData> additionalProperties = default;
+            Dictionary<string, BinaryData> additionalPropertiesDictionary = new Dictionary<string, BinaryData>();
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("type"u8))
+                {
+                    type = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("description"u8))
+                {
+                    description = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("structure"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    structure = JsonSerializer.Deserialize<DataFactoryElement<IList<DatasetDataElement>>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("schema"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    schema = JsonSerializer.Deserialize<DataFactoryElement<IList<DatasetSchemaDataElement>>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("linkedServiceName"u8))
+                {
+                    linkedServiceName = JsonSerializer.Deserialize<DataFactoryLinkedServiceReference>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("parameters"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    Dictionary<string, EntityParameterSpecification> dictionary = new Dictionary<string, EntityParameterSpecification>();
+                    foreach (var property0 in property.Value.EnumerateObject())
+                    {
+                        dictionary.Add(property0.Name, EntityParameterSpecification.DeserializeEntityParameterSpecification(property0.Value));
+                    }
+                    parameters = dictionary;
+                    continue;
+                }
+                if (property.NameEquals("annotations"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    List<BinaryData> array = new List<BinaryData>();
+                    foreach (var item in property.Value.EnumerateArray())
+                    {
+                        if (item.ValueKind == JsonValueKind.Null)
+                        {
+                            array.Add(null);
+                        }
+                        else
+                        {
+                            array.Add(BinaryData.FromString(item.GetRawText()));
+                        }
+                    }
+                    annotations = array;
+                    continue;
+                }
+                if (property.NameEquals("folder"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    folder = DatasetFolder.DeserializeDatasetFolder(property.Value);
+                    continue;
+                }
+                if (property.NameEquals("typeProperties"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        property.ThrowNonNullablePropertyIsNull();
+                        continue;
+                    }
+                    foreach (var property0 in property.Value.EnumerateObject())
+                    {
+                        if (property0.NameEquals("objectApiName"u8))
+                        {
+                            if (property0.Value.ValueKind == JsonValueKind.Null)
+                            {
+                                continue;
+                            }
+                            objectApiName = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText());
+                            continue;
+                        }
+                        if (property0.NameEquals("reportId"u8))
+                        {
+                            if (property0.Value.ValueKind == JsonValueKind.Null)
+                            {
+                                continue;
+                            }
+                            reportId = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText());
+                            continue;
+                        }
+                    }
+                    continue;
+                }
+                additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+            }
+            additionalProperties = additionalPropertiesDictionary;
+            return new SalesforceV2ObjectDataset(type, description.Value, structure.Value, schema.Value, linkedServiceName, Optional.ToDictionary(parameters), Optional.ToList(annotations), folder.Value, additionalProperties, objectApiName.Value, reportId.Value);
+        }
+    }
+}
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2ObjectDataset.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2ObjectDataset.cs
new file mode 100644
index 0000000000000..1a66f2eec9388
--- /dev/null
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2ObjectDataset.cs
@@ -0,0 +1,52 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using Azure.Core;
+using Azure.Core.Expressions.DataFactory;
+
+namespace Azure.ResourceManager.DataFactory.Models
+{
+    /// <summary> The Salesforce V2 object dataset. </summary>
+    public partial class SalesforceV2ObjectDataset : DataFactoryDatasetProperties
+    {
+        /// <summary> Initializes a new instance of <see cref="SalesforceV2ObjectDataset"/>. </summary>
+        /// <param name="linkedServiceName"> Linked service reference. </param>
+        /// <exception cref="ArgumentNullException"> <paramref name="linkedServiceName"/> is null. </exception>
+        public SalesforceV2ObjectDataset(DataFactoryLinkedServiceReference linkedServiceName) : base(linkedServiceName)
+        {
+            Argument.AssertNotNull(linkedServiceName, nameof(linkedServiceName));
+
+            DatasetType = "SalesforceV2Object";
+        }
+
+        /// <summary> Initializes a new instance of <see cref="SalesforceV2ObjectDataset"/>. </summary>
+        /// <param name="datasetType"> Type of dataset. </param>
+        /// <param name="description"> Dataset description. </param>
+        /// <param name="structure"> Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. </param>
+        /// <param name="schema"> Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. </param>
+        /// <param name="linkedServiceName"> Linked service reference. </param>
+        /// <param name="parameters"> Parameters for dataset. </param>
+        /// <param name="annotations"> List of tags that can be used for describing the Dataset. </param>
+        /// <param name="folder"> The folder that this Dataset is in. If not specified, Dataset will appear at the root level. </param>
+        /// <param name="additionalProperties"> Additional Properties. </param>
+        /// <param name="objectApiName"> The Salesforce V2 object API name. Type: string (or Expression with resultType string). </param>
+        /// <param name="reportId"> The Salesforce V2 report Id. Type: string (or Expression with resultType string). </param>
+        internal SalesforceV2ObjectDataset(string datasetType, string description, DataFactoryElement<IList<DatasetDataElement>> structure, DataFactoryElement<IList<DatasetSchemaDataElement>> schema, DataFactoryLinkedServiceReference linkedServiceName, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, DatasetFolder folder, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> objectApiName, DataFactoryElement<string> reportId) : base(datasetType, description, structure, schema, linkedServiceName, parameters, annotations, folder, additionalProperties)
+        {
+            ObjectApiName = objectApiName;
+            ReportId = reportId;
+            DatasetType = datasetType ?? "SalesforceV2Object";
+        }
+
+        /// <summary> The Salesforce V2 object API name. Type: string (or Expression with resultType string). </summary>
+        public DataFactoryElement<string> ObjectApiName { get; set; }
+        /// <summary> The Salesforce V2 report Id. Type: string (or Expression with resultType string). </summary>
+        public DataFactoryElement<string> ReportId { get; set; }
+    }
+}
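For reviewers who want to see the new dataset type in context, here is a minimal usage sketch. The linked-service reference constructor and the implicit string conversion on `DataFactoryElement<string>` come from `Azure.Core.Expressions.DataFactory`; the names are illustrative, not part of this diff.

```csharp
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

// A minimal sketch, assuming a linked service named "SalesforceV2LS" already exists.
var linkedService = new DataFactoryLinkedServiceReference(
    DataFactoryLinkedServiceReferenceType.LinkedServiceReference, "SalesforceV2LS");

var dataset = new SalesforceV2ObjectDataset(linkedService)
{
    // DataFactoryElement<string> converts implicitly from a literal string...
    ObjectApiName = "Account",
    // ...or can carry an ADF expression instead of a literal.
    ReportId = DataFactoryElement<string>.FromExpression("@dataset().reportId"),
};
```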
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Sink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Sink.Serialization.cs
new file mode 100644
index 0000000000000..f113a61190c59
--- /dev/null
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Sink.Serialization.cs
@@ -0,0 +1,195 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+using Azure.Core.Expressions.DataFactory;
+
+namespace Azure.ResourceManager.DataFactory.Models
+{
+    public partial class SalesforceV2Sink : IUtf8JsonSerializable
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
+        {
+            writer.WriteStartObject();
+            if (Optional.IsDefined(WriteBehavior))
+            {
+                writer.WritePropertyName("writeBehavior"u8);
+                writer.WriteStringValue(WriteBehavior.Value.ToString());
+            }
+            if (Optional.IsDefined(ExternalIdFieldName))
+            {
+                writer.WritePropertyName("externalIdFieldName"u8);
+                JsonSerializer.Serialize(writer, ExternalIdFieldName);
+            }
+            if (Optional.IsDefined(IgnoreNullValues))
+            {
+                writer.WritePropertyName("ignoreNullValues"u8);
+                JsonSerializer.Serialize(writer, IgnoreNullValues);
+            }
+            writer.WritePropertyName("type"u8);
+            writer.WriteStringValue(CopySinkType);
+            if (Optional.IsDefined(WriteBatchSize))
+            {
+                writer.WritePropertyName("writeBatchSize"u8);
+                JsonSerializer.Serialize(writer, WriteBatchSize);
+            }
+            if (Optional.IsDefined(WriteBatchTimeout))
+            {
+                writer.WritePropertyName("writeBatchTimeout"u8);
+                JsonSerializer.Serialize(writer, WriteBatchTimeout);
+            }
+            if (Optional.IsDefined(SinkRetryCount))
+            {
+                writer.WritePropertyName("sinkRetryCount"u8);
+                JsonSerializer.Serialize(writer, SinkRetryCount);
+            }
+            if (Optional.IsDefined(SinkRetryWait))
+            {
+                writer.WritePropertyName("sinkRetryWait"u8);
+                JsonSerializer.Serialize(writer, SinkRetryWait);
+            }
+            if (Optional.IsDefined(MaxConcurrentConnections))
+            {
+                writer.WritePropertyName("maxConcurrentConnections"u8);
+                JsonSerializer.Serialize(writer, MaxConcurrentConnections);
+            }
+            if (Optional.IsDefined(DisableMetricsCollection))
+            {
+                writer.WritePropertyName("disableMetricsCollection"u8);
+                JsonSerializer.Serialize(writer, DisableMetricsCollection);
+            }
+            foreach (var item in AdditionalProperties)
+            {
+                writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                writer.WriteRawValue(item.Value);
+#else
+                using (JsonDocument document = JsonDocument.Parse(item.Value))
+                {
+                    JsonSerializer.Serialize(writer, document.RootElement);
+                }
+#endif
+            }
+            writer.WriteEndObject();
+        }
+
+        internal static SalesforceV2Sink DeserializeSalesforceV2Sink(JsonElement element)
+        {
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            Optional<SalesforceV2SinkWriteBehavior> writeBehavior = default;
+            Optional<DataFactoryElement<string>> externalIdFieldName = default;
+            Optional<DataFactoryElement<bool>> ignoreNullValues = default;
+            string type = default;
+            Optional<DataFactoryElement<int>> writeBatchSize = default;
+            Optional<DataFactoryElement<string>> writeBatchTimeout = default;
+            Optional<DataFactoryElement<int>> sinkRetryCount = default;
+            Optional<DataFactoryElement<string>> sinkRetryWait = default;
+            Optional<DataFactoryElement<int>> maxConcurrentConnections = default;
+            Optional<DataFactoryElement<bool>> disableMetricsCollection = default;
+            IDictionary<string, BinaryData> additionalProperties = default;
+            Dictionary<string, BinaryData> additionalPropertiesDictionary = new Dictionary<string, BinaryData>();
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("writeBehavior"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    writeBehavior = new SalesforceV2SinkWriteBehavior(property.Value.GetString());
+                    continue;
+                }
+                if (property.NameEquals("externalIdFieldName"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    externalIdFieldName = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("ignoreNullValues"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    ignoreNullValues = JsonSerializer.Deserialize<DataFactoryElement<bool>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("type"u8))
+                {
+                    type = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("writeBatchSize"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    writeBatchSize = JsonSerializer.Deserialize<DataFactoryElement<int>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("writeBatchTimeout"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    writeBatchTimeout = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("sinkRetryCount"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    sinkRetryCount = JsonSerializer.Deserialize<DataFactoryElement<int>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("sinkRetryWait"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    sinkRetryWait = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("maxConcurrentConnections"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    maxConcurrentConnections = JsonSerializer.Deserialize<DataFactoryElement<int>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("disableMetricsCollection"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    disableMetricsCollection = JsonSerializer.Deserialize<DataFactoryElement<bool>>(property.Value.GetRawText());
+                    continue;
+                }
+                additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+            }
+            additionalProperties = additionalPropertiesDictionary;
+            return new SalesforceV2Sink(type, writeBatchSize.Value, writeBatchTimeout.Value, sinkRetryCount.Value, sinkRetryWait.Value, maxConcurrentConnections.Value, disableMetricsCollection.Value, additionalProperties, Optional.ToNullable(writeBehavior), externalIdFieldName.Value, ignoreNullValues.Value);
+        }
+    }
+}
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Sink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Sink.cs
new file mode 100644
index 0000000000000..03626c835b3a8
--- /dev/null
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Sink.cs
@@ -0,0 +1,50 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using Azure.Core.Expressions.DataFactory;
+
+namespace Azure.ResourceManager.DataFactory.Models
+{
+    /// <summary> A copy activity Salesforce V2 sink. </summary>
+    public partial class SalesforceV2Sink : CopySink
+    {
+        /// <summary> Initializes a new instance of <see cref="SalesforceV2Sink"/>. </summary>
+        public SalesforceV2Sink()
+        {
+            CopySinkType = "SalesforceV2Sink";
+        }
+
+        /// <summary> Initializes a new instance of <see cref="SalesforceV2Sink"/>. </summary>
+        /// <param name="copySinkType"> Copy sink type. </param>
+        /// <param name="writeBatchSize"> Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. </param>
+        /// <param name="writeBatchTimeout"> Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). </param>
+        /// <param name="sinkRetryCount"> Sink retry count. Type: integer (or Expression with resultType integer). </param>
+        /// <param name="sinkRetryWait"> Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). </param>
+        /// <param name="maxConcurrentConnections"> The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). </param>
+        /// <param name="disableMetricsCollection"> If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). </param>
+        /// <param name="additionalProperties"> Additional Properties. </param>
+        /// <param name="writeBehavior"> The write behavior for the operation. Default is Insert. </param>
+        /// <param name="externalIdFieldName"> The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). </param>
+        /// <param name="ignoreNullValues"> The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). </param>
+        internal SalesforceV2Sink(string copySinkType, DataFactoryElement<int> writeBatchSize, DataFactoryElement<string> writeBatchTimeout, DataFactoryElement<int> sinkRetryCount, DataFactoryElement<string> sinkRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, SalesforceV2SinkWriteBehavior? writeBehavior, DataFactoryElement<string> externalIdFieldName, DataFactoryElement<bool> ignoreNullValues) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties)
+        {
+            WriteBehavior = writeBehavior;
+            ExternalIdFieldName = externalIdFieldName;
+            IgnoreNullValues = ignoreNullValues;
+            CopySinkType = copySinkType ?? "SalesforceV2Sink";
+        }
+
+        /// <summary> The write behavior for the operation. Default is Insert. </summary>
+        public SalesforceV2SinkWriteBehavior? WriteBehavior { get; set; }
+        /// <summary> The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). </summary>
+        public DataFactoryElement<string> ExternalIdFieldName { get; set; }
+        /// <summary> The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). </summary>
+        public DataFactoryElement<bool> IgnoreNullValues { get; set; }
+    }
+}
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2SinkWriteBehavior.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2SinkWriteBehavior.cs
new file mode 100644
index 0000000000000..c793d9a983809
--- /dev/null
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2SinkWriteBehavior.cs
@@ -0,0 +1,51 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ComponentModel;
+
+namespace Azure.ResourceManager.DataFactory.Models
+{
+    /// <summary> The write behavior for the operation. Default is Insert. </summary>
+    public readonly partial struct SalesforceV2SinkWriteBehavior : IEquatable<SalesforceV2SinkWriteBehavior>
+    {
+        private readonly string _value;
+
+        /// <summary> Initializes a new instance of <see cref="SalesforceV2SinkWriteBehavior"/>. </summary>
+        /// <exception cref="ArgumentNullException"> <paramref name="value"/> is null. </exception>
+        public SalesforceV2SinkWriteBehavior(string value)
+        {
+            _value = value ?? throw new ArgumentNullException(nameof(value));
+        }
+
+        private const string InsertValue = "Insert";
+        private const string UpsertValue = "Upsert";
+
+        /// <summary> Insert. </summary>
+        public static SalesforceV2SinkWriteBehavior Insert { get; } = new SalesforceV2SinkWriteBehavior(InsertValue);
+        /// <summary> Upsert. </summary>
+        public static SalesforceV2SinkWriteBehavior Upsert { get; } = new SalesforceV2SinkWriteBehavior(UpsertValue);
+        /// <summary> Determines if two <see cref="SalesforceV2SinkWriteBehavior"/> values are the same. </summary>
+        public static bool operator ==(SalesforceV2SinkWriteBehavior left, SalesforceV2SinkWriteBehavior right) => left.Equals(right);
+        /// <summary> Determines if two <see cref="SalesforceV2SinkWriteBehavior"/> values are not the same. </summary>
+        public static bool operator !=(SalesforceV2SinkWriteBehavior left, SalesforceV2SinkWriteBehavior right) => !left.Equals(right);
+        /// <summary> Converts a string to a <see cref="SalesforceV2SinkWriteBehavior"/>. </summary>
+        public static implicit operator SalesforceV2SinkWriteBehavior(string value) => new SalesforceV2SinkWriteBehavior(value);
+
+        /// <inheritdoc />
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public override bool Equals(object obj) => obj is SalesforceV2SinkWriteBehavior other && Equals(other);
+        /// <inheritdoc />
+        public bool Equals(SalesforceV2SinkWriteBehavior other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase);
+
+        /// <inheritdoc />
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public override int GetHashCode() => _value?.GetHashCode() ?? 0;
+        /// <inheritdoc />
+        public override string ToString() => _value;
+    }
+}
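A short sketch of the sink and the new extensible enum together; the values are illustrative and rely on the implicit literal conversions that `DataFactoryElement<T>` provides.

```csharp
// Upsert into Salesforce, matching rows on a custom external ID field.
var sink = new SalesforceV2Sink
{
    WriteBehavior = SalesforceV2SinkWriteBehavior.Upsert,
    ExternalIdFieldName = "External_Id__c", // defaults to the 'Id' column when omitted
    IgnoreNullValues = true,                // keep destination values when source is null
};
```

Because `SalesforceV2SinkWriteBehavior` is an extensible enum with an implicit string conversion, values the service adds beyond Insert/Upsert keep round-tripping without an SDK update.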
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Source.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Source.Serialization.cs
new file mode 100644
index 0000000000000..8c3444650ce31
--- /dev/null
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Source.Serialization.cs
@@ -0,0 +1,187 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+using Azure.Core.Expressions.DataFactory;
+
+namespace Azure.ResourceManager.DataFactory.Models
+{
+    public partial class SalesforceV2Source : IUtf8JsonSerializable
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
+        {
+            writer.WriteStartObject();
+            if (Optional.IsDefined(SoqlQuery))
+            {
+                writer.WritePropertyName("SOQLQuery"u8);
+                JsonSerializer.Serialize(writer, SoqlQuery);
+            }
+            if (Optional.IsDefined(ReadBehavior))
+            {
+                writer.WritePropertyName("readBehavior"u8);
+                JsonSerializer.Serialize(writer, ReadBehavior);
+            }
+            if (Optional.IsDefined(QueryTimeout))
+            {
+                writer.WritePropertyName("queryTimeout"u8);
+                JsonSerializer.Serialize(writer, QueryTimeout);
+            }
+            if (Optional.IsDefined(AdditionalColumns))
+            {
+                writer.WritePropertyName("additionalColumns"u8);
+#if NET6_0_OR_GREATER
+                writer.WriteRawValue(AdditionalColumns);
+#else
+                using (JsonDocument document = JsonDocument.Parse(AdditionalColumns))
+                {
+                    JsonSerializer.Serialize(writer, document.RootElement);
+                }
+#endif
+            }
+            writer.WritePropertyName("type"u8);
+            writer.WriteStringValue(CopySourceType);
+            if (Optional.IsDefined(SourceRetryCount))
+            {
+                writer.WritePropertyName("sourceRetryCount"u8);
+                JsonSerializer.Serialize(writer, SourceRetryCount);
+            }
+            if (Optional.IsDefined(SourceRetryWait))
+            {
+                writer.WritePropertyName("sourceRetryWait"u8);
+                JsonSerializer.Serialize(writer, SourceRetryWait);
+            }
+            if (Optional.IsDefined(MaxConcurrentConnections))
+            {
+                writer.WritePropertyName("maxConcurrentConnections"u8);
+                JsonSerializer.Serialize(writer, MaxConcurrentConnections);
+            }
+            if (Optional.IsDefined(DisableMetricsCollection))
+            {
+                writer.WritePropertyName("disableMetricsCollection"u8);
+                JsonSerializer.Serialize(writer, DisableMetricsCollection);
+            }
+            foreach (var item in AdditionalProperties)
+            {
+                writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                writer.WriteRawValue(item.Value);
+#else
+                using (JsonDocument document = JsonDocument.Parse(item.Value))
+                {
+                    JsonSerializer.Serialize(writer, document.RootElement);
+                }
+#endif
+            }
+            writer.WriteEndObject();
+        }
+
+        internal static SalesforceV2Source DeserializeSalesforceV2Source(JsonElement element)
+        {
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            Optional<DataFactoryElement<string>> soqlQuery = default;
+            Optional<DataFactoryElement<string>> readBehavior = default;
+            Optional<DataFactoryElement<string>> queryTimeout = default;
+            Optional<BinaryData> additionalColumns = default;
+            string type = default;
+            Optional<DataFactoryElement<int>> sourceRetryCount = default;
+            Optional<DataFactoryElement<string>> sourceRetryWait = default;
+            Optional<DataFactoryElement<int>> maxConcurrentConnections = default;
+            Optional<DataFactoryElement<bool>> disableMetricsCollection = default;
+            IDictionary<string, BinaryData> additionalProperties = default;
+            Dictionary<string, BinaryData> additionalPropertiesDictionary = new Dictionary<string, BinaryData>();
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("SOQLQuery"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    soqlQuery = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("readBehavior"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    readBehavior = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("queryTimeout"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    queryTimeout = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("additionalColumns"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    additionalColumns = BinaryData.FromString(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("type"u8))
+                {
+                    type = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("sourceRetryCount"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    sourceRetryCount = JsonSerializer.Deserialize<DataFactoryElement<int>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("sourceRetryWait"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    sourceRetryWait = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("maxConcurrentConnections"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    maxConcurrentConnections = JsonSerializer.Deserialize<DataFactoryElement<int>>(property.Value.GetRawText());
+                    continue;
+                }
+                if (property.NameEquals("disableMetricsCollection"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    disableMetricsCollection = JsonSerializer.Deserialize<DataFactoryElement<bool>>(property.Value.GetRawText());
+                    continue;
+                }
+                additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+            }
+            additionalProperties = additionalPropertiesDictionary;
+            return new SalesforceV2Source(type, sourceRetryCount.Value, sourceRetryWait.Value, maxConcurrentConnections.Value, disableMetricsCollection.Value, additionalProperties, queryTimeout.Value, additionalColumns.Value, soqlQuery.Value, readBehavior.Value);
+        }
+    }
+}
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Source.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Source.cs
new file mode 100644
index 0000000000000..e50166fd4648c
--- /dev/null
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SalesforceV2Source.cs
@@ -0,0 +1,46 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using Azure.Core.Expressions.DataFactory;
+
+namespace Azure.ResourceManager.DataFactory.Models
+{
+    /// <summary> A copy activity Salesforce V2 source. </summary>
+    public partial class SalesforceV2Source : TabularSource
+    {
+        /// <summary> Initializes a new instance of <see cref="SalesforceV2Source"/>. </summary>
+        public SalesforceV2Source()
+        {
+            CopySourceType = "SalesforceV2Source";
+        }
+
+        /// <summary> Initializes a new instance of <see cref="SalesforceV2Source"/>. </summary>
+        /// <param name="copySourceType"> Copy source type. </param>
+        /// <param name="sourceRetryCount"> Source retry count. Type: integer (or Expression with resultType integer). </param>
+        /// <param name="sourceRetryWait"> Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). </param>
+        /// <param name="maxConcurrentConnections"> The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). </param>
+        /// <param name="disableMetricsCollection"> If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). </param>
+        /// <param name="additionalProperties"> Additional Properties. </param>
+        /// <param name="queryTimeout"> Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). </param>
+        /// <param name="additionalColumns"> Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). </param>
+        /// <param name="soqlQuery"> Database query. Type: string (or Expression with resultType string). </param>
+        /// <param name="readBehavior"> The read behavior for the operation. Default is query. Allowed values: query/queryAll. Type: string (or Expression with resultType string). </param>
+        internal SalesforceV2Source(string copySourceType, DataFactoryElement<int> sourceRetryCount, DataFactoryElement<string> sourceRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> queryTimeout, BinaryData additionalColumns, DataFactoryElement<string> soqlQuery, DataFactoryElement<string> readBehavior) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns)
+        {
+            SoqlQuery = soqlQuery;
+            ReadBehavior = readBehavior;
+            CopySourceType = copySourceType ?? "SalesforceV2Source";
+        }
+
+        /// <summary> Database query. Type: string (or Expression with resultType string). </summary>
+        public DataFactoryElement<string> SoqlQuery { get; set; }
+        /// <summary> The read behavior for the operation. Default is query. Allowed values: query/queryAll. Type: string (or Expression with resultType string). </summary>
+        public DataFactoryElement<string> ReadBehavior { get; set; }
+    }
+}
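And the matching source; the SOQL text is illustrative.

```csharp
var source = new SalesforceV2Source
{
    SoqlQuery = "SELECT Id, Name FROM Account",
    ReadBehavior = "queryAll", // allowed values per the docs: query / queryAll
    QueryTimeout = "02:00:00",
};
```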
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SftpWriteSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SftpWriteSettings.Serialization.cs
index b46e00228802a..06cc29f52a1d3 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SftpWriteSettings.Serialization.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SftpWriteSettings.Serialization.cs
@@ -45,6 +45,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
                 writer.WritePropertyName("copyBehavior"u8);
                 JsonSerializer.Serialize(writer, CopyBehavior);
             }
+            if (Optional.IsCollectionDefined(Metadata))
+            {
+                writer.WritePropertyName("metadata"u8);
+                writer.WriteStartArray();
+                foreach (var item in Metadata)
+                {
+                    writer.WriteObjectValue(item);
+                }
+                writer.WriteEndArray();
+            }
             foreach (var item in AdditionalProperties)
             {
                 writer.WritePropertyName(item.Key);
@@ -72,6 +82,7 @@ internal static SftpWriteSettings DeserializeSftpWriteSettings(JsonElement eleme
             Optional<DataFactoryElement<int>> maxConcurrentConnections = default;
             Optional<DataFactoryElement<bool>> disableMetricsCollection = default;
             Optional<DataFactoryElement<string>> copyBehavior = default;
+            Optional<IList<DataFactoryMetadataItemInfo>> metadata = default;
             IDictionary<string, BinaryData> additionalProperties = default;
             Dictionary<string, BinaryData> additionalPropertiesDictionary = new Dictionary<string, BinaryData>();
             foreach (var property in element.EnumerateObject())
@@ -126,10 +137,24 @@ internal static SftpWriteSettings DeserializeSftpWriteSettings(JsonElement eleme
                     copyBehavior = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
                     continue;
                 }
+                if (property.NameEquals("metadata"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    List<DataFactoryMetadataItemInfo> array = new List<DataFactoryMetadataItemInfo>();
+                    foreach (var item in property.Value.EnumerateArray())
+                    {
+                        array.Add(DataFactoryMetadataItemInfo.DeserializeDataFactoryMetadataItemInfo(item));
+                    }
+                    metadata = array;
+                    continue;
+                }
                 additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
             }
             additionalProperties = additionalPropertiesDictionary;
-            return new SftpWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, additionalProperties, operationTimeout.Value, useTempFileRename.Value);
+            return new SftpWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, Optional.ToList(metadata), additionalProperties, operationTimeout.Value, useTempFileRename.Value);
         }
     }
 }
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SftpWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SftpWriteSettings.cs
index 62cbc1b9390b6..dc500af29a799 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SftpWriteSettings.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SftpWriteSettings.cs
@@ -25,10 +25,11 @@ public SftpWriteSettings()
         /// <param name="maxConcurrentConnections"> The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). </param>
         /// <param name="disableMetricsCollection"> If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). </param>
         /// <param name="copyBehavior"> The type of copy behavior for copy sink. </param>
+        /// <param name="metadata"> Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). </param>
         /// <param name="additionalProperties"> Additional Properties. </param>
         /// <param name="operationTimeout"> Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). </param>
         /// <param name="useTempFileRename"> Upload to temporary file(s) and rename. Disable this option if your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType boolean). </param>
-        internal SftpWriteSettings(string storeWriteSettingsType, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, DataFactoryElement<string> copyBehavior, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> operationTimeout, DataFactoryElement<bool> useTempFileRename) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, additionalProperties)
+        internal SftpWriteSettings(string storeWriteSettingsType, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, DataFactoryElement<string> copyBehavior, IList<DataFactoryMetadataItemInfo> metadata, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> operationTimeout, DataFactoryElement<bool> useTempFileRename) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, metadata, additionalProperties)
         {
             OperationTimeout = operationTimeout;
             UseTempFileRename = useTempFileRename;
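A sketch of the new `Metadata` collection on the SFTP sink settings; the `DataFactoryMetadataItemInfo` property names (`Name`/`Value`) are assumed from the generated model, not shown in this diff.

```csharp
var settings = new SftpWriteSettings
{
    OperationTimeout = "01:00:00",
    UseTempFileRename = true,
};
// Custom metadata attached to each file written by the sink.
settings.Metadata.Add(new DataFactoryMetadataItemInfo
{
    Name = "source",
    Value = "adf-copy",
});
```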
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SnowflakeLinkedService.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SnowflakeLinkedService.Serialization.cs
index 688941d550c6e..be67d26ba1d73 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SnowflakeLinkedService.Serialization.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SnowflakeLinkedService.Serialization.cs
@@ -66,14 +66,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
             writer.WritePropertyName("typeProperties"u8);
             writer.WriteStartObject();
             writer.WritePropertyName("connectionString"u8);
-#if NET6_0_OR_GREATER
-            writer.WriteRawValue(ConnectionString);
-#else
-            using (JsonDocument document = JsonDocument.Parse(ConnectionString))
-            {
-                JsonSerializer.Serialize(writer, document.RootElement);
-            }
-#endif
+            JsonSerializer.Serialize(writer, ConnectionString);
             if (Optional.IsDefined(Password))
             {
                 writer.WritePropertyName("password"u8);
@@ -111,7 +104,7 @@ internal static SnowflakeLinkedService DeserializeSnowflakeLinkedService(JsonEle
             Optional<string> description = default;
             Optional<IDictionary<string, EntityParameterSpecification>> parameters = default;
             Optional<IList<BinaryData>> annotations = default;
-            BinaryData connectionString = default;
+            DataFactoryElement<string> connectionString = default;
             Optional<DataFactoryKeyVaultSecretReference> password = default;
             Optional<string> encryptedCredential = default;
             IDictionary<string, BinaryData> additionalProperties = default;
@@ -183,7 +176,7 @@ internal static SnowflakeLinkedService DeserializeSnowflakeLinkedService(JsonEle
                 {
                     if (property0.NameEquals("connectionString"u8))
                     {
-                        connectionString = BinaryData.FromString(property0.Value.GetRawText());
+                        connectionString = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText());
                         continue;
                     }
                     if (property0.NameEquals("password"u8))
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SnowflakeLinkedService.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SnowflakeLinkedService.cs
index 5e43aed54a47e..f0e6f263d901a 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SnowflakeLinkedService.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SnowflakeLinkedService.cs
@@ -18,7 +18,7 @@ public partial class SnowflakeLinkedService : DataFactoryLinkedServiceProperties
         /// <summary> Initializes a new instance of <see cref="SnowflakeLinkedService"/>. </summary>
         /// <param name="connectionString"> The connection string of snowflake. Type: string, SecureString. </param>
         /// <exception cref="ArgumentNullException"> <paramref name="connectionString"/> is null. </exception>
-        public SnowflakeLinkedService(BinaryData connectionString)
+        public SnowflakeLinkedService(DataFactoryElement<string> connectionString)
         {
             Argument.AssertNotNull(connectionString, nameof(connectionString));
 
@@ -36,7 +36,7 @@ public SnowflakeLinkedService(BinaryData connectionString)
         /// <param name="connectionString"> The connection string of snowflake. Type: string, SecureString. </param>
         /// <param name="password"> The Azure key vault secret reference of password in connection string. </param>
         /// <param name="encryptedCredential"> The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. </param>
-        internal SnowflakeLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, BinaryData connectionString, DataFactoryKeyVaultSecretReference password, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties)
+        internal SnowflakeLinkedService(string linkedServiceType, IntegrationRuntimeReference connectVia, string description, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> connectionString, DataFactoryKeyVaultSecretReference password, string encryptedCredential) : base(linkedServiceType, connectVia, description, parameters, annotations, additionalProperties)
         {
             ConnectionString = connectionString;
             Password = password;
@@ -44,37 +44,8 @@ internal SnowflakeLinkedService(string linkedServiceType, IntegrationRuntimeRefe
             LinkedServiceType = linkedServiceType ?? "Snowflake";
         }
 
-        /// <summary>
-        /// The connection string of snowflake. Type: string, SecureString.
-        /// <para>
-        /// To assign an object to this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
-        /// </para>
-        /// <para>
-        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
-        /// </para>
-        /// <para>
-        /// Examples:
-        /// <list type="bullet">
-        /// <item>
-        /// <term>BinaryData.FromObjectAsJson("foo")</term>
-        /// <description>Creates a payload of "foo".</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromString("\"foo\"")</term>
-        /// <description>Creates a payload of "foo".</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
-        /// <description>Creates a payload of { "key": "value" }.</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
-        /// <description>Creates a payload of { "key": "value" }.</description>
-        /// </item>
-        /// </list>
-        /// </para>
-        /// </summary>
-        public BinaryData ConnectionString { get; set; }
+        /// <summary> The connection string of snowflake. Type: string, SecureString. </summary>
+        public DataFactoryElement<string> ConnectionString { get; set; }
         /// <summary> The Azure key vault secret reference of password in connection string. </summary>
         public DataFactoryKeyVaultSecretReference Password { get; set; }
         /// <summary> The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string. </summary>
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlDWSource.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlDWSource.Serialization.cs
index d8b2205c7487e..4d8857a52f259 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlDWSource.Serialization.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlDWSource.Serialization.cs
@@ -48,14 +48,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
             if (Optional.IsDefined(PartitionOption))
             {
                 writer.WritePropertyName("partitionOption"u8);
-#if NET6_0_OR_GREATER
-                writer.WriteRawValue(PartitionOption);
-#else
-                using (JsonDocument document = JsonDocument.Parse(PartitionOption))
-                {
-                    JsonSerializer.Serialize(writer, document.RootElement);
-                }
-#endif
+                JsonSerializer.Serialize(writer, PartitionOption);
             }
             if (Optional.IsDefined(PartitionSettings))
             {
@@ -126,7 +119,7 @@ internal static SqlDWSource DeserializeSqlDWSource(JsonElement element)
             Optional<DataFactoryElement<string>> sqlReaderStoredProcedureName = default;
             Optional<BinaryData> storedProcedureParameters = default;
             Optional<DataFactoryElement<string>> isolationLevel = default;
-            Optional<BinaryData> partitionOption = default;
+            Optional<DataFactoryElement<string>> partitionOption = default;
             Optional<SqlPartitionSettings> partitionSettings = default;
             Optional<DataFactoryElement<string>> queryTimeout = default;
             Optional<BinaryData> additionalColumns = default;
@@ -181,7 +174,7 @@ internal static SqlDWSource DeserializeSqlDWSource(JsonElement element)
                 {
                     continue;
                 }
-                partitionOption = BinaryData.FromString(property.Value.GetRawText());
+                partitionOption = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
                 continue;
             }
             if (property.NameEquals("partitionSettings"u8))
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlDWSource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlDWSource.cs
index e055fc4f5720d..4a8c73b6ccb40 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlDWSource.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlDWSource.cs
@@ -33,9 +33,9 @@ public SqlDWSource()
         /// <param name="sqlReaderStoredProcedureName"> Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). </param>
         /// <param name="storedProcedureParameters"> Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. </param>
         /// <param name="isolationLevel"> Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). </param>
-        /// <param name="partitionOption"> The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". </param>
+        /// <param name="partitionOption"> The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). </param>
         /// <param name="partitionSettings"> The settings that will be leveraged for Sql source partitioning. </param>
-        internal SqlDWSource(string copySourceType, DataFactoryElement<int> sourceRetryCount, DataFactoryElement<string> sourceRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> queryTimeout, BinaryData additionalColumns, DataFactoryElement<string> sqlReaderQuery, DataFactoryElement<string> sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement<string> isolationLevel, BinaryData partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns)
+        internal SqlDWSource(string copySourceType, DataFactoryElement<int> sourceRetryCount, DataFactoryElement<string> sourceRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> queryTimeout, BinaryData additionalColumns, DataFactoryElement<string> sqlReaderQuery, DataFactoryElement<string> sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement<string> isolationLevel, DataFactoryElement<string> partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns)
         {
             SqlReaderQuery = sqlReaderQuery;
             SqlReaderStoredProcedureName = sqlReaderStoredProcedureName;
@@ -83,37 +83,8 @@ internal SqlDWSource(string copySourceType, DataFactoryElement<int> sourceRetryC
         public BinaryData StoredProcedureParameters { get; set; }
         /// <summary> Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). </summary>
         public DataFactoryElement<string> IsolationLevel { get; set; }
-        /// <summary>
-        /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange".
-        /// <para>
-        /// To assign an object to this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
-        /// </para>
-        /// <para>
-        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
-        /// </para>
-        /// <para>
-        /// Examples:
-        /// <list type="bullet">
-        /// <item>
-        /// <term>BinaryData.FromObjectAsJson("foo")</term>
-        /// <description>Creates a payload of "foo".</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromString("\"foo\"")</term>
-        /// <description>Creates a payload of "foo".</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
-        /// <description>Creates a payload of { "key": "value" }.</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
-        /// <description>Creates a payload of { "key": "value" }.</description>
-        /// </item>
-        /// </list>
-        /// </para>
-        /// </summary>
-        public BinaryData PartitionOption { get; set; }
+        /// <summary> The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). </summary>
+        public DataFactoryElement<string> PartitionOption { get; set; }
         /// <summary> The settings that will be leveraged for Sql source partitioning. </summary>
         public SqlPartitionSettings PartitionSettings { get; set; }
     }
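With `ConnectionString` now typed as `DataFactoryElement<string>`, callers no longer hand-build JSON through `BinaryData`; both literals and expressions round-trip. A sketch (the connection string contents are illustrative):

```csharp
// Literal assignment via the implicit string conversion.
var snowflake = new SnowflakeLinkedService(
    "jdbc:snowflake://myaccount.snowflakecomputing.com/?db=MYDB&warehouse=MYWH");

// Or defer the value to a linked-service parameter with an ADF expression.
snowflake.ConnectionString =
    DataFactoryElement<string>.FromExpression("@linkedService().connectionString");
```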
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISink.Serialization.cs
index a229513be6059..5a0da99a807c9 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISink.Serialization.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISink.Serialization.cs
@@ -63,14 +63,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
             if (Optional.IsDefined(WriteBehavior))
             {
                 writer.WritePropertyName("writeBehavior"u8);
-#if NET6_0_OR_GREATER
-                writer.WriteRawValue(WriteBehavior);
-#else
-                using (JsonDocument document = JsonDocument.Parse(WriteBehavior))
-                {
-                    JsonSerializer.Serialize(writer, document.RootElement);
-                }
-#endif
+                JsonSerializer.Serialize(writer, WriteBehavior);
             }
             if (Optional.IsDefined(UpsertSettings))
             {
@@ -137,7 +130,7 @@ internal static SqlMISink DeserializeSqlMISink(JsonElement element)
             Optional<DataFactoryElement<string>> storedProcedureTableTypeParameterName = default;
             Optional<DataFactoryElement<string>> tableOption = default;
             Optional<DataFactoryElement<bool>> sqlWriterUseTableLock = default;
-            Optional<BinaryData> writeBehavior = default;
+            Optional<DataFactoryElement<string>> writeBehavior = default;
             Optional<SqlUpsertSettings> upsertSettings = default;
             string type = default;
             Optional<DataFactoryElement<int>> writeBatchSize = default;
@@ -219,7 +212,7 @@ internal static SqlMISink DeserializeSqlMISink(JsonElement element)
                 {
                     continue;
                 }
-                writeBehavior = BinaryData.FromString(property.Value.GetRawText());
+                writeBehavior = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
                 continue;
             }
             if (property.NameEquals("upsertSettings"u8))
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISink.cs
index b0416c9b7c337..51abbb5daa932 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISink.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISink.cs
@@ -36,9 +36,9 @@ public SqlMISink()
         /// <param name="storedProcedureTableTypeParameterName"> The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). </param>
         /// <param name="tableOption"> The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). </param>
         /// <param name="sqlWriterUseTableLock"> Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). </param>
-        /// <param name="writeBehavior"> White behavior when copying data into azure SQL MI. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). </param>
+        /// <param name="writeBehavior"> Write behavior when copying data into azure SQL MI. Type: string (or Expression with resultType string). </param>
         /// <param name="upsertSettings"> SQL upsert settings. </param>
-        internal SqlMISink(string copySinkType, DataFactoryElement<int> writeBatchSize, DataFactoryElement<string> writeBatchTimeout, DataFactoryElement<int> sinkRetryCount, DataFactoryElement<string> sinkRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> sqlWriterStoredProcedureName, DataFactoryElement<string> sqlWriterTableType, DataFactoryElement<string> preCopyScript, BinaryData storedProcedureParameters, DataFactoryElement<string> storedProcedureTableTypeParameterName, DataFactoryElement<string> tableOption, DataFactoryElement<bool> sqlWriterUseTableLock, BinaryData writeBehavior, SqlUpsertSettings upsertSettings) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties)
+        internal SqlMISink(string copySinkType, DataFactoryElement<int> writeBatchSize, DataFactoryElement<string> writeBatchTimeout, DataFactoryElement<int> sinkRetryCount, DataFactoryElement<string> sinkRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> sqlWriterStoredProcedureName, DataFactoryElement<string> sqlWriterTableType, DataFactoryElement<string> preCopyScript, BinaryData storedProcedureParameters, DataFactoryElement<string> storedProcedureTableTypeParameterName, DataFactoryElement<string> tableOption, DataFactoryElement<bool> sqlWriterUseTableLock, DataFactoryElement<string> writeBehavior, SqlUpsertSettings upsertSettings) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties)
         {
             SqlWriterStoredProcedureName = sqlWriterStoredProcedureName;
             SqlWriterTableType = sqlWriterTableType;
@@ -95,37 +95,8 @@ internal SqlMISink(string copySinkType, DataFactoryElement<int> writeBatchSize,
         public DataFactoryElement<string> TableOption { get; set; }
         /// <summary> Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). </summary>
         public DataFactoryElement<bool> SqlWriterUseTableLock { get; set; }
-        /// <summary>
-        /// White behavior when copying data into azure SQL MI. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum)
-        /// <para>
-        /// To assign an object to this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
-        /// </para>
-        /// <para>
-        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
-        /// </para>
-        /// <para>
-        /// Examples:
-        /// <list type="bullet">
-        /// <item>
-        /// <term>BinaryData.FromObjectAsJson("foo")</term>
-        /// <description>Creates a payload of "foo".</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromString("\"foo\"")</term>
-        /// <description>Creates a payload of "foo".</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
-        /// <description>Creates a payload of { "key": "value" }.</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
-        /// <description>Creates a payload of { "key": "value" }.</description>
-        /// </item>
-        /// </list>
-        /// </para>
-        /// </summary>
-        public BinaryData WriteBehavior { get; set; }
+        /// <summary> Write behavior when copying data into azure SQL MI. Type: string (or Expression with resultType string). </summary>
+        public DataFactoryElement<string> WriteBehavior { get; set; }
         /// <summary> SQL upsert settings. </summary>
         public SqlUpsertSettings UpsertSettings { get; set; }
     }
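A sketch of the MI sink after the `WriteBehavior` change; the `SqlUpsertSettings` member names (`UseTempDB`, `InterimSchemaName`) are assumed from the generated model, not shown in this diff.

```csharp
var miSink = new SqlMISink
{
    WriteBehavior = "Upsert", // a plain string still works via the implicit conversion
    UpsertSettings = new SqlUpsertSettings
    {
        UseTempDB = true,
        InterimSchemaName = "dbo",
    },
};
```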
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISource.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISource.Serialization.cs
index f23c64cafa868..364e4f2d3dcf8 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISource.Serialization.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISource.Serialization.cs
@@ -60,14 +60,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
             if (Optional.IsDefined(PartitionOption))
             {
                 writer.WritePropertyName("partitionOption"u8);
-#if NET6_0_OR_GREATER
-                writer.WriteRawValue(PartitionOption);
-#else
-                using (JsonDocument document = JsonDocument.Parse(PartitionOption))
-                {
-                    JsonSerializer.Serialize(writer, document.RootElement);
-                }
-#endif
+                JsonSerializer.Serialize(writer, PartitionOption);
             }
             if (Optional.IsDefined(PartitionSettings))
             {
@@ -139,7 +132,7 @@ internal static SqlMISource DeserializeSqlMISource(JsonElement element)
             Optional<BinaryData> storedProcedureParameters = default;
             Optional<DataFactoryElement<string>> isolationLevel = default;
             Optional<BinaryData> produceAdditionalTypes = default;
-            Optional<BinaryData> partitionOption = default;
+            Optional<DataFactoryElement<string>> partitionOption = default;
             Optional<SqlPartitionSettings> partitionSettings = default;
             Optional<DataFactoryElement<string>> queryTimeout = default;
             Optional<BinaryData> additionalColumns = default;
@@ -203,7 +196,7 @@ internal static SqlMISource DeserializeSqlMISource(JsonElement element)
                 {
                     continue;
                 }
-                partitionOption = BinaryData.FromString(property.Value.GetRawText());
+                partitionOption = JsonSerializer.Deserialize<DataFactoryElement<string>>(property.Value.GetRawText());
                 continue;
             }
             if (property.NameEquals("partitionSettings"u8))
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISource.cs
index ac2c33224b0b5..b3f4196d7512a 100644
--- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISource.cs
+++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlMISource.cs
@@ -34,9 +34,9 @@ public SqlMISource()
         /// <param name="storedProcedureParameters"> Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". </param>
         /// <param name="isolationLevel"> Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). </param>
         /// <param name="produceAdditionalTypes"> Which additional types to produce. </param>
-        /// <param name="partitionOption"> The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". </param>
+        /// <param name="partitionOption"> The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). </param>
         /// <param name="partitionSettings"> The settings that will be leveraged for Sql source partitioning. </param>
-        internal SqlMISource(string copySourceType, DataFactoryElement<int> sourceRetryCount, DataFactoryElement<string> sourceRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> queryTimeout, BinaryData additionalColumns, DataFactoryElement<string> sqlReaderQuery, DataFactoryElement<string> sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement<string> isolationLevel, BinaryData produceAdditionalTypes, BinaryData partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns)
+        internal SqlMISource(string copySourceType, DataFactoryElement<int> sourceRetryCount, DataFactoryElement<string> sourceRetryWait, DataFactoryElement<int> maxConcurrentConnections, DataFactoryElement<bool> disableMetricsCollection, IDictionary<string, BinaryData> additionalProperties, DataFactoryElement<string> queryTimeout, BinaryData additionalColumns, DataFactoryElement<string> sqlReaderQuery, DataFactoryElement<string> sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement<string> isolationLevel, BinaryData produceAdditionalTypes, DataFactoryElement<string> partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns)
         {
             SqlReaderQuery = sqlReaderQuery;
             SqlReaderStoredProcedureName = sqlReaderStoredProcedureName;
@@ -116,37 +116,8 @@ internal SqlMISource(string copySourceType, DataFactoryElement<int> sourceRetryC
         /// </para>
        /// </summary>
         public BinaryData ProduceAdditionalTypes { get; set; }
-        /// <summary>
-        /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange".
-        /// <para>
-        /// To assign an object to this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
-        /// </para>
-        /// <para>
-        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
-        /// </para>
-        /// <para>
-        /// Examples:
-        /// <list type="bullet">
-        /// <item>
-        /// <term>BinaryData.FromObjectAsJson("foo")</term>
-        /// <description>Creates a payload of "foo".</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromString("\"foo\"")</term>
-        /// <description>Creates a payload of "foo".</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
-        /// <description>Creates a payload of { "key": "value" }.</description>
-        /// </item>
-        /// <item>
-        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
-        /// <description>Creates a payload of { "key": "value" }.</description>
-        /// </item>
-        /// </list>
-        /// </para>
-        /// </summary>
-        public BinaryData PartitionOption { get; set; }
+        /// <summary> The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). </summary>
+        public DataFactoryElement<string> PartitionOption { get; set; }
         /// <summary> The settings that will be leveraged for Sql source partitioning. </summary>
         public SqlPartitionSettings PartitionSettings { get; set; }
     }
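The same pattern applies on the source side: `PartitionOption` now accepts either a literal or an expression. The `SqlPartitionSettings` member name below is assumed from the generated model.

```csharp
var miSource = new SqlMISource
{
    SqlReaderQuery = "SELECT * FROM dbo.Orders",
    PartitionOption = "DynamicRange",
    PartitionSettings = new SqlPartitionSettings
    {
        PartitionColumnName = "OrderId",
    },
};
```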
public SqlPartitionSettings PartitionSettings { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSink.Serialization.cs index 159d013381083..0e8466d5f5c3a 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSink.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSink.Serialization.cs @@ -63,14 +63,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(WriteBehavior)) { writer.WritePropertyName("writeBehavior"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(WriteBehavior); -#else - using (JsonDocument document = JsonDocument.Parse(WriteBehavior)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, WriteBehavior); } if (Optional.IsDefined(UpsertSettings)) { @@ -137,7 +130,7 @@ internal static SqlServerSink DeserializeSqlServerSink(JsonElement element) Optional> storedProcedureTableTypeParameterName = default; Optional> tableOption = default; Optional> sqlWriterUseTableLock = default; - Optional writeBehavior = default; + Optional> writeBehavior = default; Optional upsertSettings = default; string type = default; Optional> writeBatchSize = default; @@ -219,7 +212,7 @@ internal static SqlServerSink DeserializeSqlServerSink(JsonElement element) { continue; } - writeBehavior = BinaryData.FromString(property.Value.GetRawText()); + writeBehavior = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } if (property.NameEquals("upsertSettings"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSink.cs index ff40e1bcd221b..0055d024b1c12 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSink.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSink.cs @@ -36,9 +36,9 @@ public SqlServerSink() /// The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). /// The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). /// Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - /// Write behavior when copying data into sql server. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + /// Write behavior when copying data into sql server. Type: string (or Expression with resultType string). /// SQL upsert settings. 
- internal SqlServerSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement sqlWriterStoredProcedureName, DataFactoryElement sqlWriterTableType, DataFactoryElement preCopyScript, BinaryData storedProcedureParameters, DataFactoryElement storedProcedureTableTypeParameterName, DataFactoryElement tableOption, DataFactoryElement sqlWriterUseTableLock, BinaryData writeBehavior, SqlUpsertSettings upsertSettings) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + internal SqlServerSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement sqlWriterStoredProcedureName, DataFactoryElement sqlWriterTableType, DataFactoryElement preCopyScript, BinaryData storedProcedureParameters, DataFactoryElement storedProcedureTableTypeParameterName, DataFactoryElement tableOption, DataFactoryElement sqlWriterUseTableLock, DataFactoryElement writeBehavior, SqlUpsertSettings upsertSettings) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) { SqlWriterStoredProcedureName = sqlWriterStoredProcedureName; SqlWriterTableType = sqlWriterTableType; @@ -95,37 +95,8 @@ internal SqlServerSink(string copySinkType, DataFactoryElement writeBatchSi public DataFactoryElement TableOption { get; set; } /// Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). public DataFactoryElement SqlWriterUseTableLock { get; set; } - /// - /// Write behavior when copying data into sql server. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum) - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData WriteBehavior { get; set; } + /// Write behavior when copying data into sql server. Type: string (or Expression with resultType string). + public DataFactoryElement WriteBehavior { get; set; } /// SQL upsert settings. 
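The sink side follows the same pattern: `WriteBehavior` moves from raw `BinaryData` to `DataFactoryElement<string>`. A hedged sketch of configuring an upsert — the `SqlUpsertSettings.UseTempDB` member is assumed from the generated model, not shown in this diff:

```csharp
var sink = new SqlServerSink
{
    WriteBehavior = "Upsert",      // literal; an expression element works too
    UpsertSettings = new SqlUpsertSettings
    {
        UseTempDB = true,          // DataFactoryElement<bool> via implicit conversion
    },
};
```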
public SqlUpsertSettings UpsertSettings { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSource.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSource.Serialization.cs index d72e0fae48f2a..bd1f25df54f51 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSource.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSource.Serialization.cs @@ -60,14 +60,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(PartitionOption)) { writer.WritePropertyName("partitionOption"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(PartitionOption); -#else - using (JsonDocument document = JsonDocument.Parse(PartitionOption)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, PartitionOption); } if (Optional.IsDefined(PartitionSettings)) { @@ -139,7 +132,7 @@ internal static SqlServerSource DeserializeSqlServerSource(JsonElement element) Optional storedProcedureParameters = default; Optional> isolationLevel = default; Optional produceAdditionalTypes = default; - Optional partitionOption = default; + Optional> partitionOption = default; Optional partitionSettings = default; Optional> queryTimeout = default; Optional additionalColumns = default; @@ -203,7 +196,7 @@ internal static SqlServerSource DeserializeSqlServerSource(JsonElement element) { continue; } - partitionOption = BinaryData.FromString(property.Value.GetRawText()); + partitionOption = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } if (property.NameEquals("partitionSettings"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSource.cs index 22d2a055f05e8..a2534e11e1798 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSource.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlServerSource.cs @@ -34,9 +34,9 @@ public SqlServerSource() /// Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". /// Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). /// Which additional types to produce. - /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). /// The settings that will be leveraged for Sql source partitioning. 
- internal SqlServerSource(string copySourceType, DataFactoryElement sourceRetryCount, DataFactoryElement sourceRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement queryTimeout, BinaryData additionalColumns, DataFactoryElement sqlReaderQuery, DataFactoryElement sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement isolationLevel, BinaryData produceAdditionalTypes, BinaryData partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns) + internal SqlServerSource(string copySourceType, DataFactoryElement sourceRetryCount, DataFactoryElement sourceRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement queryTimeout, BinaryData additionalColumns, DataFactoryElement sqlReaderQuery, DataFactoryElement sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement isolationLevel, BinaryData produceAdditionalTypes, DataFactoryElement partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns) { SqlReaderQuery = sqlReaderQuery; SqlReaderStoredProcedureName = sqlReaderStoredProcedureName; @@ -116,37 +116,8 @@ internal SqlServerSource(string copySourceType, DataFactoryElement sourceRe /// /// public BinaryData ProduceAdditionalTypes { get; set; } - /// - /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData PartitionOption { get; set; } + /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). + public DataFactoryElement PartitionOption { get; set; } /// The settings that will be leveraged for Sql source partitioning. 
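On the wire nothing changes: a literal still serializes as a bare JSON string, while an expression serializes as the usual `{"value": ..., "type": "Expression"}` envelope, so existing pipeline definitions keep round-tripping. A quick way to sanity-check this (it relies on the `JsonConverter` that `Azure.Core.Expressions.DataFactory` attaches to `DataFactoryElement<T>`, which is how the generated `JsonSerializer.Serialize(writer, PartitionOption)` calls work):

```csharp
using System;
using System.Text.Json;
using Azure.Core.Expressions.DataFactory;

var literal = DataFactoryElement<string>.FromLiteral("DynamicRange");
var expr = DataFactoryElement<string>.FromExpression("@pipeline().parameters.partitionMode");

Console.WriteLine(JsonSerializer.Serialize(literal));
// "DynamicRange"
Console.WriteLine(JsonSerializer.Serialize(expr));
// {"value":"@pipeline().parameters.partitionMode","type":"Expression"}
```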
public SqlPartitionSettings PartitionSettings { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSink.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSink.Serialization.cs index 9bc18ca58caa2..8e3fb1e8764b9 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSink.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSink.Serialization.cs @@ -63,14 +63,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(WriteBehavior)) { writer.WritePropertyName("writeBehavior"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(WriteBehavior); -#else - using (JsonDocument document = JsonDocument.Parse(WriteBehavior)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, WriteBehavior); } if (Optional.IsDefined(UpsertSettings)) { @@ -137,7 +130,7 @@ internal static SqlSink DeserializeSqlSink(JsonElement element) Optional> storedProcedureTableTypeParameterName = default; Optional> tableOption = default; Optional> sqlWriterUseTableLock = default; - Optional writeBehavior = default; + Optional> writeBehavior = default; Optional upsertSettings = default; string type = default; Optional> writeBatchSize = default; @@ -219,7 +212,7 @@ internal static SqlSink DeserializeSqlSink(JsonElement element) { continue; } - writeBehavior = BinaryData.FromString(property.Value.GetRawText()); + writeBehavior = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } if (property.NameEquals("upsertSettings"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSink.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSink.cs index 176d88b33ee4c..ad871589a7bd9 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSink.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSink.cs @@ -36,9 +36,9 @@ public SqlSink() /// The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). /// The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). /// Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). - /// Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum). + /// Write behavior when copying data into sql. Type: string (or Expression with resultType string). /// SQL upsert settings. 
- internal SqlSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement sqlWriterStoredProcedureName, DataFactoryElement sqlWriterTableType, DataFactoryElement preCopyScript, BinaryData storedProcedureParameters, DataFactoryElement storedProcedureTableTypeParameterName, DataFactoryElement tableOption, DataFactoryElement sqlWriterUseTableLock, BinaryData writeBehavior, SqlUpsertSettings upsertSettings) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) + internal SqlSink(string copySinkType, DataFactoryElement writeBatchSize, DataFactoryElement writeBatchTimeout, DataFactoryElement sinkRetryCount, DataFactoryElement sinkRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement sqlWriterStoredProcedureName, DataFactoryElement sqlWriterTableType, DataFactoryElement preCopyScript, BinaryData storedProcedureParameters, DataFactoryElement storedProcedureTableTypeParameterName, DataFactoryElement tableOption, DataFactoryElement sqlWriterUseTableLock, DataFactoryElement writeBehavior, SqlUpsertSettings upsertSettings) : base(copySinkType, writeBatchSize, writeBatchTimeout, sinkRetryCount, sinkRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties) { SqlWriterStoredProcedureName = sqlWriterStoredProcedureName; SqlWriterTableType = sqlWriterTableType; @@ -95,37 +95,8 @@ internal SqlSink(string copySinkType, DataFactoryElement writeBatchSize, Da public DataFactoryElement TableOption { get; set; } /// Whether to use table lock during bulk copy. Type: boolean (or Expression with resultType boolean). public DataFactoryElement SqlWriterUseTableLock { get; set; } - /// - /// Write behavior when copying data into sql. Type: SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum) - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData WriteBehavior { get; set; } + /// Write behavior when copying data into sql. Type: string (or Expression with resultType string). + public DataFactoryElement WriteBehavior { get; set; } /// SQL upsert settings. 
public SqlUpsertSettings UpsertSettings { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSource.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSource.Serialization.cs index 109c6420ff7bc..d53e67a7cf55d 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSource.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSource.Serialization.cs @@ -48,14 +48,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) if (Optional.IsDefined(PartitionOption)) { writer.WritePropertyName("partitionOption"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(PartitionOption); -#else - using (JsonDocument document = JsonDocument.Parse(PartitionOption)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, PartitionOption); } if (Optional.IsDefined(PartitionSettings)) { @@ -126,7 +119,7 @@ internal static SqlSource DeserializeSqlSource(JsonElement element) Optional> sqlReaderStoredProcedureName = default; Optional storedProcedureParameters = default; Optional> isolationLevel = default; - Optional partitionOption = default; + Optional> partitionOption = default; Optional partitionSettings = default; Optional> queryTimeout = default; Optional additionalColumns = default; @@ -181,7 +174,7 @@ internal static SqlSource DeserializeSqlSource(JsonElement element) { continue; } - partitionOption = BinaryData.FromString(property.Value.GetRawText()); + partitionOption = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } if (property.NameEquals("partitionSettings"u8)) diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSource.cs index 0e924e21063f8..73b7be89292bb 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSource.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SqlSource.cs @@ -33,9 +33,9 @@ public SqlSource() /// Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). /// Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". /// Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). - /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). /// The settings that will be leveraged for Sql source partitioning. 
- internal SqlSource(string copySourceType, DataFactoryElement sourceRetryCount, DataFactoryElement sourceRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement queryTimeout, BinaryData additionalColumns, DataFactoryElement sqlReaderQuery, DataFactoryElement sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement isolationLevel, BinaryData partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns) + internal SqlSource(string copySourceType, DataFactoryElement sourceRetryCount, DataFactoryElement sourceRetryWait, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, IDictionary additionalProperties, DataFactoryElement queryTimeout, BinaryData additionalColumns, DataFactoryElement sqlReaderQuery, DataFactoryElement sqlReaderStoredProcedureName, BinaryData storedProcedureParameters, DataFactoryElement isolationLevel, DataFactoryElement partitionOption, SqlPartitionSettings partitionSettings) : base(copySourceType, sourceRetryCount, sourceRetryWait, maxConcurrentConnections, disableMetricsCollection, additionalProperties, queryTimeout, additionalColumns) { SqlReaderQuery = sqlReaderQuery; SqlReaderStoredProcedureName = sqlReaderStoredProcedureName; @@ -83,37 +83,8 @@ internal SqlSource(string copySourceType, DataFactoryElement sourceRetryCou public BinaryData StoredProcedureParameters { get; set; } /// Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is ReadCommitted. Type: string (or Expression with resultType string). public DataFactoryElement IsolationLevel { get; set; } - /// - /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData PartitionOption { get; set; } + /// The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". Type: string (or Expression with resultType string). + public DataFactoryElement PartitionOption { get; set; } /// The settings that will be leveraged for Sql source partitioning. 
public SqlPartitionSettings PartitionSettings { get; set; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/StoreWriteSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/StoreWriteSettings.Serialization.cs index 97b597f02e017..e95050cb749cd 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/StoreWriteSettings.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/StoreWriteSettings.Serialization.cs @@ -32,6 +32,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("copyBehavior"u8); JsonSerializer.Serialize(writer, CopyBehavior); } + if (Optional.IsCollectionDefined(Metadata)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartArray(); + foreach (var item in Metadata) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/StoreWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/StoreWriteSettings.cs index 35604c8cd1edd..d07f911493c13 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/StoreWriteSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/StoreWriteSettings.cs @@ -22,6 +22,7 @@ public partial class StoreWriteSettings /// Initializes a new instance of . public StoreWriteSettings() { + Metadata = new ChangeTrackingList(); AdditionalProperties = new ChangeTrackingDictionary(); } @@ -30,13 +31,15 @@ public StoreWriteSettings() /// The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// The type of copy behavior for copy sink. + /// Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). /// Additional Properties. - internal StoreWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IDictionary additionalProperties) + internal StoreWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IList metadata, IDictionary additionalProperties) { StoreWriteSettingsType = storeWriteSettingsType; MaxConcurrentConnections = maxConcurrentConnections; DisableMetricsCollection = disableMetricsCollection; CopyBehavior = copyBehavior; + Metadata = metadata; AdditionalProperties = additionalProperties; } @@ -48,6 +51,8 @@ internal StoreWriteSettings(string storeWriteSettingsType, DataFactoryElement DisableMetricsCollection { get; set; } /// The type of copy behavior for copy sink. public DataFactoryElement CopyBehavior { get; set; } + /// Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). 
+ public IList Metadata { get; } /// /// Additional Properties /// diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SynapseSparkJobReference.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SynapseSparkJobReference.Serialization.cs index 0f4bc9d4560b3..e9cd34e1fafcc 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SynapseSparkJobReference.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SynapseSparkJobReference.Serialization.cs @@ -5,9 +5,9 @@ #nullable disable -using System; using System.Text.Json; using Azure.Core; +using Azure.Core.Expressions.DataFactory; namespace Azure.ResourceManager.DataFactory.Models { @@ -19,14 +19,7 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("type"u8); writer.WriteStringValue(SparkJobReferenceType.ToString()); writer.WritePropertyName("referenceName"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(ReferenceName); -#else - using (JsonDocument document = JsonDocument.Parse(ReferenceName)) - { - JsonSerializer.Serialize(writer, document.RootElement); - } -#endif + JsonSerializer.Serialize(writer, ReferenceName); writer.WriteEndObject(); } @@ -37,7 +30,7 @@ internal static SynapseSparkJobReference DeserializeSynapseSparkJobReference(Jso return null; } SparkJobReferenceType type = default; - BinaryData referenceName = default; + DataFactoryElement referenceName = default; foreach (var property in element.EnumerateObject()) { if (property.NameEquals("type"u8)) @@ -47,7 +40,7 @@ internal static SynapseSparkJobReference DeserializeSynapseSparkJobReference(Jso } if (property.NameEquals("referenceName"u8)) { - referenceName = BinaryData.FromString(property.Value.GetRawText()); + referenceName = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SynapseSparkJobReference.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SynapseSparkJobReference.cs index 278107acddf49..f72b7e6e28be6 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SynapseSparkJobReference.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/SynapseSparkJobReference.cs @@ -7,6 +7,7 @@ using System; using Azure.Core; +using Azure.Core.Expressions.DataFactory; namespace Azure.ResourceManager.DataFactory.Models { @@ -17,7 +18,7 @@ public partial class SynapseSparkJobReference /// Synapse spark job reference type. /// Reference spark job name. Expression with resultType string. /// is null. - public SynapseSparkJobReference(SparkJobReferenceType sparkJobReferenceType, BinaryData referenceName) + public SynapseSparkJobReference(SparkJobReferenceType sparkJobReferenceType, DataFactoryElement referenceName) { Argument.AssertNotNull(referenceName, nameof(referenceName)); @@ -27,36 +28,7 @@ public SynapseSparkJobReference(SparkJobReferenceType sparkJobReferenceType, Bin /// Synapse spark job reference type. public SparkJobReferenceType SparkJobReferenceType { get; set; } - /// - /// Reference spark job name. Expression with resultType string. - /// - /// To assign an object to this property use . - /// - /// - /// To assign an already formatted json string to this property use . - /// - /// - /// Examples: - /// - /// - /// BinaryData.FromObjectAsJson("foo") - /// Creates a payload of "foo". 
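The new `Metadata` property follows the SDK's collection convention: get-only, backed by a `ChangeTrackingList`, and populated via `Add`. A sketch using a concrete derived settings type — the `AzureBlobStorageWriteSettings` choice is illustrative, and `DataFactoryMetadataItemInfo` is assumed to expose settable `Name`/`Value` elements:

```csharp
var settings = new AzureBlobStorageWriteSettings();
settings.Metadata.Add(new DataFactoryMetadataItemInfo
{
    Name = "sourceSystem",                          // literal
    Value = DataFactoryElement<string>.FromExpression(
        "@pipeline().DataFactory"),                 // resolved at runtime
});
```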
- /// - /// - /// BinaryData.FromString("\"foo\"") - /// Creates a payload of "foo". - /// - /// - /// BinaryData.FromObjectAsJson(new { key = "value" }) - /// Creates a payload of { "key": "value" }. - /// - /// - /// BinaryData.FromString("{\"key\": \"value\"}") - /// Creates a payload of { "key": "value" }. - /// - /// - /// - /// - public BinaryData ReferenceName { get; set; } + /// Reference spark job name. Expression with resultType string. + public DataFactoryElement ReferenceName { get; set; } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TabularSource.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TabularSource.Serialization.cs index dd0806dfac28c..bd04fba7b75f0 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TabularSource.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TabularSource.Serialization.cs @@ -121,6 +121,7 @@ internal static TabularSource DeserializeTabularSource(JsonElement element) case "ResponsysSource": return ResponsysSource.DeserializeResponsysSource(element); case "SalesforceMarketingCloudSource": return SalesforceMarketingCloudSource.DeserializeSalesforceMarketingCloudSource(element); case "SalesforceSource": return SalesforceSource.DeserializeSalesforceSource(element); + case "SalesforceV2Source": return SalesforceV2Source.DeserializeSalesforceV2Source(element); case "SapBwSource": return SapBWSource.DeserializeSapBWSource(element); case "SapCloudForCustomerSource": return SapCloudForCustomerSource.DeserializeSapCloudForCustomerSource(element); case "SapEccSource": return SapEccSource.DeserializeSapEccSource(element); diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TabularSource.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TabularSource.cs index 8fad58cf57a98..0b8c9a6400fc2 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TabularSource.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/TabularSource.cs @@ -14,7 +14,7 @@ namespace Azure.ResourceManager.DataFactory.Models /// /// Copy activity sources of tabular type. /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. - /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , and . 
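The constructor change means `referenceName` can now be parameterized directly rather than wrapped in `BinaryData`. A minimal sketch (the `SparkJobDefinitionReference` enum value name is assumed from the generated `SparkJobReferenceType`; the pipeline parameter is hypothetical):

```csharp
var jobReference = new SynapseSparkJobReference(
    SparkJobReferenceType.SparkJobDefinitionReference,
    DataFactoryElement<string>.FromExpression("@pipeline().parameters.sparkJobName"));
```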
/// public partial class TabularSource : CopyActivitySource { diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/UnknownStoreWriteSettings.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/UnknownStoreWriteSettings.Serialization.cs index b24cd5271d22f..88fa571d61cb7 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/UnknownStoreWriteSettings.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/UnknownStoreWriteSettings.Serialization.cs @@ -35,6 +35,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("copyBehavior"u8); JsonSerializer.Serialize(writer, CopyBehavior); } + if (Optional.IsCollectionDefined(Metadata)) + { + writer.WritePropertyName("metadata"u8); + writer.WriteStartArray(); + foreach (var item in Metadata) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } foreach (var item in AdditionalProperties) { writer.WritePropertyName(item.Key); @@ -60,6 +70,7 @@ internal static UnknownStoreWriteSettings DeserializeUnknownStoreWriteSettings(J Optional> maxConcurrentConnections = default; Optional> disableMetricsCollection = default; Optional> copyBehavior = default; + Optional> metadata = default; IDictionary additionalProperties = default; Dictionary additionalPropertiesDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -96,10 +107,24 @@ internal static UnknownStoreWriteSettings DeserializeUnknownStoreWriteSettings(J copyBehavior = JsonSerializer.Deserialize>(property.Value.GetRawText()); continue; } + if (property.NameEquals("metadata"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(DataFactoryMetadataItemInfo.DeserializeDataFactoryMetadataItemInfo(item)); + } + metadata = array; + continue; + } additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new UnknownStoreWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, additionalProperties); + return new UnknownStoreWriteSettings(type, maxConcurrentConnections.Value, disableMetricsCollection.Value, copyBehavior.Value, Optional.ToList(metadata), additionalProperties); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/UnknownStoreWriteSettings.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/UnknownStoreWriteSettings.cs index 996301fbc5f43..090de9a249939 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/UnknownStoreWriteSettings.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/UnknownStoreWriteSettings.cs @@ -19,8 +19,9 @@ internal partial class UnknownStoreWriteSettings : StoreWriteSettings /// The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). /// If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). /// The type of copy behavior for copy sink. + /// Specify the custom metadata to be added to sink data. Type: array of objects (or Expression with resultType array of objects). /// Additional Properties. 
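Because `TabularSource` deserialization now dispatches on the `SalesforceV2Source` discriminator, payloads with `"type": "SalesforceV2Source"` materialize as the derived type rather than the unknown fallback and can be down-cast as usual. A sketch (only base `TabularSource` members are used here; `QueryTimeout` is assumed to accept a literal via the implicit conversion):

```csharp
// copySource comes back from the service typed as the base CopyActivitySource.
if (copySource is SalesforceV2Source salesforceSource)
{
    salesforceSource.QueryTimeout = "02:00:00";
}
```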
- internal UnknownStoreWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IDictionary additionalProperties) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, additionalProperties) + internal UnknownStoreWriteSettings(string storeWriteSettingsType, DataFactoryElement maxConcurrentConnections, DataFactoryElement disableMetricsCollection, DataFactoryElement copyBehavior, IList metadata, IDictionary additionalProperties) : base(storeWriteSettingsType, maxConcurrentConnections, disableMetricsCollection, copyBehavior, metadata, additionalProperties) { StoreWriteSettingsType = storeWriteSettingsType ?? "Unknown"; } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebActivity.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebActivity.Serialization.cs index 0aa25f7c2fcf7..bafb3489fea9b 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebActivity.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebActivity.Serialization.cs @@ -73,10 +73,21 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WriteStringValue(Method.ToString()); writer.WritePropertyName("url"u8); JsonSerializer.Serialize(writer, Uri); - if (Optional.IsDefined(Headers)) + if (Optional.IsCollectionDefined(Headers)) { writer.WritePropertyName("headers"u8); - JsonSerializer.Serialize(writer, Headers); + writer.WriteStartObject(); + foreach (var item in Headers) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + JsonSerializer.Serialize(writer, item.Value); + } + writer.WriteEndObject(); } if (Optional.IsDefined(Body)) { @@ -93,6 +104,16 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("disableCertValidation"u8); writer.WriteBooleanValue(DisableCertValidation.Value); } + if (Optional.IsDefined(HttpRequestTimeout)) + { + writer.WritePropertyName("httpRequestTimeout"u8); + JsonSerializer.Serialize(writer, HttpRequestTimeout); + } + if (Optional.IsDefined(TurnOffAsync)) + { + writer.WritePropertyName("turnOffAsync"u8); + writer.WriteBooleanValue(TurnOffAsync.Value); + } if (Optional.IsCollectionDefined(Datasets)) { writer.WritePropertyName("datasets"u8); @@ -151,10 +172,12 @@ internal static WebActivity DeserializeWebActivity(JsonElement element) Optional> userProperties = default; WebActivityMethod method = default; DataFactoryElement url = default; - Optional> headers = default; + Optional>> headers = default; Optional> body = default; Optional authentication = default; Optional disableCertValidation = default; + Optional> httpRequestTimeout = default; + Optional turnOffAsync = default; Optional> datasets = default; Optional> linkedServices = default; Optional connectVia = default; @@ -266,7 +289,19 @@ internal static WebActivity DeserializeWebActivity(JsonElement element) { continue; } - headers = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + Dictionary> dictionary = new Dictionary>(); + foreach (var property1 in property0.Value.EnumerateObject()) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(property1.Name, null); + } + else + { + dictionary.Add(property1.Name, JsonSerializer.Deserialize>(property1.Value.GetRawText())); + } + } + headers = dictionary; continue; 
} if (property0.NameEquals("body"u8)) @@ -296,6 +331,24 @@ internal static WebActivity DeserializeWebActivity(JsonElement element) disableCertValidation = property0.Value.GetBoolean(); continue; } + if (property0.NameEquals("httpRequestTimeout"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + httpRequestTimeout = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + continue; + } + if (property0.NameEquals("turnOffAsync"u8)) + { + if (property0.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + turnOffAsync = property0.Value.GetBoolean(); + continue; + } if (property0.NameEquals("datasets"u8)) { if (property0.Value.ValueKind == JsonValueKind.Null) @@ -339,7 +392,7 @@ internal static WebActivity DeserializeWebActivity(JsonElement element) additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new WebActivity(name, type, description.Value, Optional.ToNullable(state), Optional.ToNullable(onInactiveMarkAs), Optional.ToList(dependsOn), Optional.ToList(userProperties), additionalProperties, linkedServiceName, policy.Value, method, url, headers.Value, body.Value, authentication.Value, Optional.ToNullable(disableCertValidation), Optional.ToList(datasets), Optional.ToList(linkedServices), connectVia.Value); + return new WebActivity(name, type, description.Value, Optional.ToNullable(state), Optional.ToNullable(onInactiveMarkAs), Optional.ToList(dependsOn), Optional.ToList(userProperties), additionalProperties, linkedServiceName, policy.Value, method, url, Optional.ToDictionary(headers), body.Value, authentication.Value, Optional.ToNullable(disableCertValidation), httpRequestTimeout.Value, Optional.ToNullable(turnOffAsync), Optional.ToList(datasets), Optional.ToList(linkedServices), connectVia.Value); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebActivity.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebActivity.cs index cfe67f5f6264f..5d5e184bba116 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebActivity.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebActivity.cs @@ -27,6 +27,7 @@ public WebActivity(string name, WebActivityMethod method, DataFactoryElement>(); Datasets = new ChangeTrackingList(); LinkedServices = new ChangeTrackingList(); ActivityType = "WebActivity"; @@ -49,10 +50,12 @@ public WebActivity(string name, WebActivityMethod method, DataFactoryElement Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). /// Authentication method used for calling the endpoint. /// When set to true, Certificate validation will be disabled. + /// Timeout for the HTTP request to get a response. Format is in TimeSpan (hh:mm:ss). This value is the timeout to get a response, not the activity timeout. The default value is 00:01:00 (1 minute). The range is from 1 to 10 minutes. + /// Option to disable invoking HTTP GET on location given in response header of a HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set false then continues to invoke HTTP GET call on location given in http response headers. /// List of datasets passed to web endpoint. /// List of linked services passed to web endpoint. /// The integration runtime reference. 
- internal WebActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, WebActivityMethod method, DataFactoryElement uri, DataFactoryElement headers, DataFactoryElement body, WebActivityAuthentication authentication, bool? disableCertValidation, IList datasets, IList linkedServices, IntegrationRuntimeReference connectVia) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) + internal WebActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, DataFactoryLinkedServiceReference linkedServiceName, PipelineActivityPolicy policy, WebActivityMethod method, DataFactoryElement uri, IDictionary> headers, DataFactoryElement body, WebActivityAuthentication authentication, bool? disableCertValidation, DataFactoryElement httpRequestTimeout, bool? turnOffAsync, IList datasets, IList linkedServices, IntegrationRuntimeReference connectVia) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties, linkedServiceName, policy) { Method = method; Uri = uri; @@ -60,6 +63,8 @@ internal WebActivity(string name, string activityType, string description, Pipel Body = body; Authentication = authentication; DisableCertValidation = disableCertValidation; + HttpRequestTimeout = httpRequestTimeout; + TurnOffAsync = turnOffAsync; Datasets = datasets; LinkedServices = linkedServices; ConnectVia = connectVia; @@ -71,13 +76,17 @@ internal WebActivity(string name, string activityType, string description, Pipel /// Web activity target endpoint and path. Type: string (or Expression with resultType string). public DataFactoryElement Uri { get; set; } /// Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - public DataFactoryElement Headers { get; set; } + public IDictionary> Headers { get; } /// Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). public DataFactoryElement Body { get; set; } /// Authentication method used for calling the endpoint. public WebActivityAuthentication Authentication { get; set; } /// When set to true, Certificate validation will be disabled. public bool? DisableCertValidation { get; set; } + /// Timeout for the HTTP request to get a response. Format is in TimeSpan (hh:mm:ss). This value is the timeout to get a response, not the activity timeout. The default value is 00:01:00 (1 minute). The range is from 1 to 10 minutes. + public DataFactoryElement HttpRequestTimeout { get; set; } + /// Option to disable invoking HTTP GET on location given in response header of a HTTP 202 Response. If set true, it stops invoking HTTP GET on http location given in response header. If set false then continues to invoke HTTP GET call on location given in http response headers. + public bool? TurnOffAsync { get; set; } /// List of datasets passed to web endpoint. 
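Putting the `WebActivity` changes together: headers become a per-key dictionary of elements (get-only, so entries are added rather than the whole map assigned), and the two new knobs are plain properties. A hedged sketch — the URL, token parameter, and header values are illustrative:

```csharp
var web = new WebActivity(
    "CallApi",
    WebActivityMethod.Post,
    DataFactoryElement<string>.FromLiteral("https://contoso.example/api/run"));

web.Headers["Content-Type"] = "application/json";
web.Headers["Authorization"] = DataFactoryElement<string>.FromExpression(
    "@concat('Bearer ', pipeline().parameters.apiToken)");

web.HttpRequestTimeout = "00:02:00";  // 1-10 minute range per the doc comment
web.TurnOffAsync = true;              // skip polling the 202 Location header
```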
public IList Datasets { get; } /// List of linked services passed to web endpoint. diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebHookActivity.Serialization.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebHookActivity.Serialization.cs index 561bd5789be6c..db5be4bd42b98 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebHookActivity.Serialization.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebHookActivity.Serialization.cs @@ -73,10 +73,21 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("timeout"u8); writer.WriteStringValue(Timeout); } - if (Optional.IsDefined(Headers)) + if (Optional.IsCollectionDefined(Headers)) { writer.WritePropertyName("headers"u8); - JsonSerializer.Serialize(writer, Headers); + writer.WriteStartObject(); + foreach (var item in Headers) + { + writer.WritePropertyName(item.Key); + if (item.Value == null) + { + writer.WriteNullValue(); + continue; + } + JsonSerializer.Serialize(writer, item.Value); + } + writer.WriteEndObject(); } if (Optional.IsDefined(Body)) { @@ -126,7 +137,7 @@ internal static WebHookActivity DeserializeWebHookActivity(JsonElement element) WebHookActivityMethod method = default; DataFactoryElement url = default; Optional timeout = default; - Optional> headers = default; + Optional>> headers = default; Optional> body = default; Optional authentication = default; Optional> reportStatusOnCallBack = default; @@ -234,7 +245,19 @@ internal static WebHookActivity DeserializeWebHookActivity(JsonElement element) { continue; } - headers = JsonSerializer.Deserialize>(property0.Value.GetRawText()); + Dictionary> dictionary = new Dictionary>(); + foreach (var property1 in property0.Value.EnumerateObject()) + { + if (property1.Value.ValueKind == JsonValueKind.Null) + { + dictionary.Add(property1.Name, null); + } + else + { + dictionary.Add(property1.Name, JsonSerializer.Deserialize>(property1.Value.GetRawText())); + } + } + headers = dictionary; continue; } if (property0.NameEquals("body"u8)) @@ -270,7 +293,7 @@ internal static WebHookActivity DeserializeWebHookActivity(JsonElement element) additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } additionalProperties = additionalPropertiesDictionary; - return new WebHookActivity(name, type, description.Value, Optional.ToNullable(state), Optional.ToNullable(onInactiveMarkAs), Optional.ToList(dependsOn), Optional.ToList(userProperties), additionalProperties, policy.Value, method, url, timeout.Value, headers.Value, body.Value, authentication.Value, reportStatusOnCallBack.Value); + return new WebHookActivity(name, type, description.Value, Optional.ToNullable(state), Optional.ToNullable(onInactiveMarkAs), Optional.ToList(dependsOn), Optional.ToList(userProperties), additionalProperties, policy.Value, method, url, timeout.Value, Optional.ToDictionary(headers), body.Value, authentication.Value, reportStatusOnCallBack.Value); } } } diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebHookActivity.cs b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebHookActivity.cs index 966de28ab1966..d5247eabc7ee0 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebHookActivity.cs +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/Generated/Models/WebHookActivity.cs @@ -27,6 +27,7 @@ public WebHookActivity(string 
name, WebHookActivityMethod method, DataFactoryEle Method = method; Uri = uri; + Headers = new ChangeTrackingDictionary>(); ActivityType = "WebHook"; } @@ -47,7 +48,7 @@ public WebHookActivity(string name, WebHookActivityMethod method, DataFactoryEle /// Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). /// Authentication method used for calling the endpoint. /// When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). - internal WebHookActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, SecureInputOutputPolicy policy, WebHookActivityMethod method, DataFactoryElement uri, string timeout, DataFactoryElement headers, DataFactoryElement body, WebActivityAuthentication authentication, DataFactoryElement reportStatusOnCallBack) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties) + internal WebHookActivity(string name, string activityType, string description, PipelineActivityState? state, ActivityOnInactiveMarkAs? onInactiveMarkAs, IList dependsOn, IList userProperties, IDictionary additionalProperties, SecureInputOutputPolicy policy, WebHookActivityMethod method, DataFactoryElement uri, string timeout, IDictionary> headers, DataFactoryElement body, WebActivityAuthentication authentication, DataFactoryElement reportStatusOnCallBack) : base(name, activityType, description, state, onInactiveMarkAs, dependsOn, userProperties, additionalProperties) { Policy = policy; Method = method; @@ -69,7 +70,7 @@ internal WebHookActivity(string name, string activityType, string description, P /// The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). public string Timeout { get; set; } /// Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). - public DataFactoryElement Headers { get; set; } + public IDictionary> Headers { get; } /// Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). public DataFactoryElement Body { get; set; } /// Authentication method used for calling the endpoint. 
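`WebHookActivity.Headers` gets the identical dictionary treatment; only the surrounding members (`Timeout` as a pattern-constrained string, `ReportStatusOnCallBack`) differ from `WebActivity`. A brief sketch with an illustrative callback URL:

```csharp
var hook = new WebHookActivity(
    "NotifyDone",
    WebHookActivityMethod.Post,
    DataFactoryElement<string>.FromLiteral("https://contoso.example/hooks/done"));

hook.Headers["x-correlation-id"] = DataFactoryElement<string>.FromExpression("@pipeline().RunId");
```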
diff --git a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/autorest.md b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/autorest.md index e664e4db156e6..7988a199eebbf 100644 --- a/sdk/datafactory/Azure.ResourceManager.DataFactory/src/autorest.md +++ b/sdk/datafactory/Azure.ResourceManager.DataFactory/src/autorest.md @@ -9,7 +9,7 @@ generate-model-factory: false csharp: true library-name: DataFactory namespace: Azure.ResourceManager.DataFactory -require: https://github.com/Azure/azure-rest-api-specs/blob/78eac0bd58633028293cb1ec1709baa200bed9e2/specification/datafactory/resource-manager/readme.md +require: /mnt/vss/_work/1/s/azure-rest-api-specs/specification/datafactory/resource-manager/readme.md output-folder: $(this-folder)/Generated clear-output-folder: true sample-gen: