From c268b64f9c340a1177938f4d35844570854deea6 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Tue, 31 Oct 2023 02:51:09 +0000 Subject: [PATCH] CodeGen from PR 26480 in Azure/azure-rest-api-specs Merge 6b69c855bee21666c7e69e56af4442b6b9ffce21 into 98d4950539bb131648779ab173ec4e4c8850c2d0 --- .../datafactory/armdatafactory/CHANGELOG.md | 228 + .../armdatafactory/activityruns_client.go | 19 +- .../activityruns_client_example_test.go | 138 - .../datafactory/armdatafactory/autorest.md | 6 +- .../changedatacapture_client.go | 487 ++ .../armdatafactory/client_factory.go | 84 +- .../datafactory/armdatafactory/constants.go | 162 +- .../credentialoperations_client.go | 38 +- ...redentialoperations_client_example_test.go | 149 - .../armdatafactory/datafactory_live_test.go | 2 +- .../dataflowdebugsession_client.go | 68 +- ...ataflowdebugsession_client_example_test.go | 265 - .../armdatafactory/dataflows_client.go | 38 +- .../dataflows_client_example_test.go | 460 -- .../armdatafactory/datasets_client.go | 38 +- .../datasets_client_example_test.go | 334 - .../armdatafactory/exposurecontrol_client.go | 51 +- .../exposurecontrol_client_example_test.go | 114 - .../armdatafactory/factories_client.go | 102 +- .../factories_client_example_test.go | 592 -- .../armdatafactory/globalparameters_client.go | 38 +- .../globalparameters_client_example_test.go | 209 - .../datafactory/armdatafactory/go.mod | 16 +- .../datafactory/armdatafactory/go.sum | 20 +- .../integrationruntimenodes_client.go | 49 +- ...grationruntimenodes_client_example_test.go | 144 - ...integrationruntimeobjectmetadata_client.go | 32 +- ...ntimeobjectmetadata_client_example_test.go | 577 -- .../integrationruntimes_client.go | 196 +- ...integrationruntimes_client_example_test.go | 638 -- .../datafactory/armdatafactory/interfaces.go | 379 + .../armdatafactory/linkedservices_client.go | 38 +- .../linkedservices_client_example_test.go | 212 - .../managedprivateendpoints_client.go | 38 +- ...gedprivateendpoints_client_example_test.go | 164 - .../managedvirtualnetworks_client.go | 30 +- ...agedvirtualnetworks_client_example_test.go | 118 - .../datafactory/armdatafactory/models.go | 6904 ++++++++--------- .../armdatafactory/models_serde.go | 1924 ++++- .../armdatafactory/operations_client.go | 3 +- .../operations_client_example_test.go | 1044 --- .../datafactory/armdatafactory/options.go | 642 ++ .../armdatafactory/pipelineruns_client.go | 38 +- .../pipelineruns_client_example_test.go | 158 - .../armdatafactory/pipelines_client.go | 54 +- .../pipelines_client_example_test.go | 558 -- .../armdatafactory/polymorphic_helpers.go | 120 +- .../privateendpointconnection_client.go | 38 +- ...eendpointconnection_client_example_test.go | 119 - .../privateendpointconnections_client.go | 3 +- ...endpointconnections_client_example_test.go | 62 - .../privatelinkresources_client.go | 14 +- ...rivatelinkresources_client_example_test.go | 53 - .../armdatafactory/response_types.go | 115 +- .../armdatafactory/time_rfc3339.go | 3 +- .../armdatafactory/triggerruns_client.go | 35 +- .../triggerruns_client_example_test.go | 103 - .../armdatafactory/triggers_client.go | 117 +- .../triggers_client_example_test.go | 465 -- 59 files changed, 7734 insertions(+), 11111 deletions(-) delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/activityruns_client_example_test.go create mode 100644 sdk/resourcemanager/datafactory/armdatafactory/changedatacapture_client.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/credentialoperations_client_example_test.go 
delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/dataflowdebugsession_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/dataflows_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/datasets_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/exposurecontrol_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/factories_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/globalparameters_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/integrationruntimenodes_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/integrationruntimeobjectmetadata_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/integrationruntimes_client_example_test.go create mode 100644 sdk/resourcemanager/datafactory/armdatafactory/interfaces.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/linkedservices_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/managedprivateendpoints_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/managedvirtualnetworks_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/operations_client_example_test.go create mode 100644 sdk/resourcemanager/datafactory/armdatafactory/options.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/pipelineruns_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/pipelines_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnection_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnections_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/privatelinkresources_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/triggerruns_client_example_test.go delete mode 100644 sdk/resourcemanager/datafactory/armdatafactory/triggers_client_example_test.go diff --git a/sdk/resourcemanager/datafactory/armdatafactory/CHANGELOG.md b/sdk/resourcemanager/datafactory/armdatafactory/CHANGELOG.md index c520bd4dd7c9..29f7077009af 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/CHANGELOG.md +++ b/sdk/resourcemanager/datafactory/armdatafactory/CHANGELOG.md @@ -1,5 +1,233 @@ # Release History +## 4.0.0 (2023-10-31) +### Breaking Changes + +- Type of `AmazonMWSLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AmazonRdsForLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AmazonRdsForSQLServerLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AmazonRedshiftLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AmazonS3CompatibleLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AmazonS3LinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AsanaLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureBatchLinkedServiceTypeProperties.EncryptedCredential` has 
been changed from `any` to `*string` +- Type of `AzureBlobFSLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureDataLakeAnalyticsLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureDataLakeStoreLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureDatabricksDetltaLakeLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureDatabricksLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureFileStorageLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureFunctionLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureMLLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureMLServiceLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureMariaDBLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureMySQLLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzurePostgreSQLLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureSQLDWLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureSQLDatabaseLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureSQLMILinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `AzureSearchLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `CassandraLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `CommonDataServiceForAppsLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `ConcurLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `CosmosDbLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `CouchbaseLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `DataworldLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `Db2LinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `DrillLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `DynamicsAXLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `DynamicsCrmLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `DynamicsLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `EloquaLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `FileServerLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `FtpServerLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `GoogleAdWordsLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to 
`*string` +- Type of `GoogleBigQueryLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `GoogleCloudStorageLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `GoogleSheetsLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `GreenplumLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `HBaseLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `HDInsightHiveActivityTypeProperties.Variables` has been changed from `[]any` to `map[string]any` +- Type of `HDInsightLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `HDInsightOnDemandLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `HTTPLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `HdfsLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `HiveLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `HubspotLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `ImpalaLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `InformixLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `JiraLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `MagentoLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `MariaDBLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `MarketoLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `MicrosoftAccessLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `MongoDbLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `MySQLLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `NetezzaLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `ODataLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `OdbcLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `Office365LinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `OracleCloudStorageLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `OracleLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `OracleServiceCloudLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `PaypalLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `PhoenixLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `PostgreSQLLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `PrestoLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type 
of `QuickBooksLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `QuickbaseLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `ResponsysLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `RestResourceDatasetTypeProperties.AdditionalHeaders` has been changed from `any` to `map[string]any` +- Type of `RestResourceDatasetTypeProperties.PaginationRules` has been changed from `any` to `map[string]any` +- Type of `RestServiceLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SQLServerLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SalesforceLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SalesforceMarketingCloudLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SalesforceServiceCloudLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SapBWLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SapCloudForCustomerLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SapHanaLinkedServiceProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SapOdpLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SapOpenHubLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SapTableLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `ServiceNowLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SftpServerLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SharePointOnlineListLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `ShopifyLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SmartsheetLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SnowflakeLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SparkLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SquareLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `SybaseLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `TeamDeskLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `TeradataLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `VerticaLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `XeroLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `ZendeskLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Type of `ZohoLinkedServiceTypeProperties.EncryptedCredential` has been changed from `any` to `*string` +- Enum `CosmosDbServicePrincipalCredentialType` has been removed +- Enum `SalesforceSourceReadBehavior` 
has been removed +- Field `EnablePartitionDiscovery`, `PartitionRootPath` of struct `HTTPReadSettings` has been removed + +### Features Added + +- Type of `AmazonS3CompatibleReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `AmazonS3ReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `AzureBlobFSReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `AzureBlobStorageLinkedServiceTypeProperties.AccountKind` has been changed from `*string` to `any` +- Type of `AzureBlobStorageLinkedServiceTypeProperties.ServiceEndpoint` has been changed from `*string` to `any` +- Type of `AzureBlobStorageReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `AzureDataLakeStoreReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `AzureFileStorageReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `CosmosDbLinkedServiceTypeProperties.ServicePrincipalCredentialType` has been changed from `*CosmosDbServicePrincipalCredentialType` to `any` +- Type of `FileServerReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `FtpReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `FtpReadSettings.UseBinaryTransfer` has been changed from `*bool` to `any` +- Type of `GoogleCloudStorageReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `HdfsReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `OracleCloudStorageReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `SalesforceServiceCloudSource.ReadBehavior` has been changed from `*SalesforceSourceReadBehavior` to `any` +- Type of `SalesforceSource.ReadBehavior` has been changed from `*SalesforceSourceReadBehavior` to `any` +- Type of `SapEccLinkedServiceTypeProperties.URL` has been changed from `*string` to `any` +- Type of `SapEccLinkedServiceTypeProperties.Username` has been changed from `*string` to `any` +- Type of `SftpReadSettings.EnablePartitionDiscovery` has been changed from `*bool` to `any` +- Type of `SynapseNotebookActivityTypeProperties.NumExecutors` has been changed from `*int32` to `any` +- New enum type `ActivityOnInactiveMarkAs` with values `ActivityOnInactiveMarkAsFailed`, `ActivityOnInactiveMarkAsSkipped`, `ActivityOnInactiveMarkAsSucceeded` +- New enum type `ActivityState` with values `ActivityStateActive`, `ActivityStateInactive` +- New enum type `ConnectionType` with values `ConnectionTypeLinkedservicetype` +- New enum type `FrequencyType` with values `FrequencyTypeHour`, `FrequencyTypeMinute`, `FrequencyTypeSecond` +- New enum type `MappingType` with values `MappingTypeAggregate`, `MappingTypeDerived`, `MappingTypeDirect` +- New function `NewChangeDataCaptureClient(string, azcore.TokenCredential, *arm.ClientOptions) (*ChangeDataCaptureClient, error)` +- New function `*ChangeDataCaptureClient.CreateOrUpdate(context.Context, string, string, string, ChangeDataCaptureResource, *ChangeDataCaptureClientCreateOrUpdateOptions) (ChangeDataCaptureClientCreateOrUpdateResponse, error)` +- New function `*ChangeDataCaptureClient.Delete(context.Context, string, string, string, *ChangeDataCaptureClientDeleteOptions) (ChangeDataCaptureClientDeleteResponse, error)` +- New function `*ChangeDataCaptureClient.Get(context.Context, string, string, string, 
*ChangeDataCaptureClientGetOptions) (ChangeDataCaptureClientGetResponse, error)` +- New function `*ChangeDataCaptureClient.NewListByFactoryPager(string, string, *ChangeDataCaptureClientListByFactoryOptions) *runtime.Pager[ChangeDataCaptureClientListByFactoryResponse]` +- New function `*ChangeDataCaptureClient.Start(context.Context, string, string, string, *ChangeDataCaptureClientStartOptions) (ChangeDataCaptureClientStartResponse, error)` +- New function `*ChangeDataCaptureClient.Status(context.Context, string, string, string, *ChangeDataCaptureClientStatusOptions) (ChangeDataCaptureClientStatusResponse, error)` +- New function `*ChangeDataCaptureClient.Stop(context.Context, string, string, string, *ChangeDataCaptureClientStopOptions) (ChangeDataCaptureClientStopResponse, error)` +- New function `*ClientFactory.NewChangeDataCaptureClient() *ChangeDataCaptureClient` +- New function `*ParquetReadSettings.GetFormatReadSettings() *FormatReadSettings` +- New struct `ChangeDataCapture` +- New struct `ChangeDataCaptureFolder` +- New struct `ChangeDataCaptureListResponse` +- New struct `ChangeDataCaptureResource` +- New struct `DataMapperMapping` +- New struct `IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem` +- New struct `MapperAttributeMapping` +- New struct `MapperAttributeMappings` +- New struct `MapperAttributeReference` +- New struct `MapperConnection` +- New struct `MapperConnectionReference` +- New struct `MapperDslConnectorProperties` +- New struct `MapperPolicy` +- New struct `MapperPolicyRecurrence` +- New struct `MapperSourceConnectionsInfo` +- New struct `MapperTable` +- New struct `MapperTableProperties` +- New struct `MapperTableSchema` +- New struct `MapperTargetConnectionsInfo` +- New struct `ParquetReadSettings` +- New struct `SecureInputOutputPolicy` +- New field `OnInactiveMarkAs`, `State` in struct `Activity` +- New field `IsolationLevel` in struct `AmazonRdsForSQLServerSource` +- New field `OnInactiveMarkAs`, `State` in struct `AppendVariableActivity` +- New field `OnInactiveMarkAs`, `State` in struct `AzureDataExplorerCommandActivity` +- New field `OnInactiveMarkAs`, `State` in struct `AzureFunctionActivity` +- New field `OnInactiveMarkAs`, `State` in struct `AzureMLBatchExecutionActivity` +- New field `OnInactiveMarkAs`, `State` in struct `AzureMLExecutePipelineActivity` +- New field `Authentication` in struct `AzureMLServiceLinkedServiceTypeProperties` +- New field `OnInactiveMarkAs`, `State` in struct `AzureMLUpdateResourceActivity` +- New field `IsolationLevel` in struct `AzureSQLSource` +- New field `OnInactiveMarkAs`, `State` in struct `ControlActivity` +- New field `OnInactiveMarkAs`, `State` in struct `CopyActivity` +- New field `OnInactiveMarkAs`, `State` in struct `CustomActivity` +- New field `OnInactiveMarkAs`, `State` in struct `DataLakeAnalyticsUSQLActivity` +- New field `OnInactiveMarkAs`, `State` in struct `DatabricksNotebookActivity` +- New field `OnInactiveMarkAs`, `State` in struct `DatabricksSparkJarActivity` +- New field `OnInactiveMarkAs`, `State` in struct `DatabricksSparkPythonActivity` +- New field `OnInactiveMarkAs`, `State` in struct `DeleteActivity` +- New field `OnInactiveMarkAs`, `State` in struct `ExecuteDataFlowActivity` +- New field `OnInactiveMarkAs`, `State` in struct `ExecutePipelineActivity` +- New field `OnInactiveMarkAs`, `State` in struct `ExecuteSSISPackageActivity` +- New field `OnInactiveMarkAs`, `State` in struct `ExecuteWranglingDataflowActivity` +- New field `OnInactiveMarkAs`, `State` in struct `ExecutionActivity` +- 
New field `OnInactiveMarkAs`, `State` in struct `FailActivity` +- New field `OnInactiveMarkAs`, `State` in struct `FilterActivity` +- New field `OnInactiveMarkAs`, `State` in struct `ForEachActivity` +- New field `OnInactiveMarkAs`, `State` in struct `GetMetadataActivity` +- New field `GoogleAdsAPIVersion`, `LoginCustomerID`, `PrivateKey`, `SupportLegacyDataTypes` in struct `GoogleAdWordsLinkedServiceTypeProperties` +- New field `OnInactiveMarkAs`, `State` in struct `HDInsightHiveActivity` +- New field `OnInactiveMarkAs`, `State` in struct `HDInsightMapReduceActivity` +- New field `OnInactiveMarkAs`, `State` in struct `HDInsightPigActivity` +- New field `OnInactiveMarkAs`, `State` in struct `HDInsightSparkActivity` +- New field `OnInactiveMarkAs`, `State` in struct `HDInsightStreamingActivity` +- New field `AdditionalColumns` in struct `HTTPReadSettings` +- New field `OnInactiveMarkAs`, `State` in struct `IfConditionActivity` +- New field `CustomProperties` in struct `IntegrationRuntimeDataFlowProperties` +- New field `OnInactiveMarkAs`, `State` in struct `LookupActivity` +- New field `DriverVersion` in struct `MongoDbAtlasLinkedServiceTypeProperties` +- New field `FormatSettings` in struct `ParquetSource` +- New field `NumberOfExternalNodes`, `NumberOfPipelineNodes` in struct `PipelineExternalComputeScaleProperties` +- New field `IsolationLevel` in struct `SQLDWSource` +- New field `IsolationLevel` in struct `SQLMISource` +- New field `IsolationLevel` in struct `SQLServerSource` +- New field `OnInactiveMarkAs`, `State` in struct `SQLServerStoredProcedureActivity` +- New field `OnInactiveMarkAs`, `State` in struct `ScriptActivity` +- New field `SelfContainedInteractiveAuthoringEnabled` in struct `SelfHostedIntegrationRuntimeStatusTypeProperties` +- New field `SelfContainedInteractiveAuthoringEnabled` in struct `SelfHostedIntegrationRuntimeTypeProperties` +- New field `OnInactiveMarkAs`, `Policy`, `State` in struct `SetVariableActivity` +- New field `SetSystemVariable` in struct `SetVariableActivityTypeProperties` +- New field `OnInactiveMarkAs`, `State` in struct `SwitchActivity` +- New field `OnInactiveMarkAs`, `State` in struct `SynapseNotebookActivity` +- New field `ConfigurationType`, `SparkConfig`, `TargetSparkConfiguration` in struct `SynapseNotebookActivityTypeProperties` +- New field `OnInactiveMarkAs`, `State` in struct `SynapseSparkJobDefinitionActivity` +- New field `OnInactiveMarkAs`, `State` in struct `UntilActivity` +- New field `OnInactiveMarkAs`, `State` in struct `ValidationActivity` +- New field `OnInactiveMarkAs`, `State` in struct `WaitActivity` +- New field `OnInactiveMarkAs`, `State` in struct `WebActivity` +- New field `OnInactiveMarkAs`, `Policy`, `State` in struct `WebHookActivity` + + ## 3.2.1 (2023-04-14) ### Bug Fixes diff --git a/sdk/resourcemanager/datafactory/armdatafactory/activityruns_client.go b/sdk/resourcemanager/datafactory/armdatafactory/activityruns_client.go index 6f1c4feb912f..45db74d7f4e0 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/activityruns_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/activityruns_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
package armdatafactory @@ -55,18 +54,21 @@ func NewActivityRunsClient(subscriptionID string, credential azcore.TokenCredent // - options - ActivityRunsClientQueryByPipelineRunOptions contains the optional parameters for the ActivityRunsClient.QueryByPipelineRun // method. func (client *ActivityRunsClient) QueryByPipelineRun(ctx context.Context, resourceGroupName string, factoryName string, runID string, filterParameters RunFilterParameters, options *ActivityRunsClientQueryByPipelineRunOptions) (ActivityRunsClientQueryByPipelineRunResponse, error) { + var err error req, err := client.queryByPipelineRunCreateRequest(ctx, resourceGroupName, factoryName, runID, filterParameters, options) if err != nil { return ActivityRunsClientQueryByPipelineRunResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return ActivityRunsClientQueryByPipelineRunResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return ActivityRunsClientQueryByPipelineRunResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ActivityRunsClientQueryByPipelineRunResponse{}, err } - return client.queryByPipelineRunHandleResponse(resp) + resp, err := client.queryByPipelineRunHandleResponse(httpResp) + return resp, err } // queryByPipelineRunCreateRequest creates the QueryByPipelineRun request. @@ -96,7 +98,10 @@ func (client *ActivityRunsClient) queryByPipelineRunCreateRequest(ctx context.Co reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, filterParameters) + if err := runtime.MarshalAsJSON(req, filterParameters); err != nil { + return nil, err + } + return req, nil } // queryByPipelineRunHandleResponse handles the QueryByPipelineRun response. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/activityruns_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/activityruns_client_example_test.go deleted file mode 100644 index 8c896998a728..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/activityruns_client_example_test.go +++ /dev/null @@ -1,138 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
- -package armdatafactory_test - -import ( - "context" - "log" - - "time" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ActivityRuns_QueryByPipelineRun.json -func ExampleActivityRunsClient_QueryByPipelineRun() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewActivityRunsClient().QueryByPipelineRun(ctx, "exampleResourceGroup", "exampleFactoryName", "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", armdatafactory.RunFilterParameters{ - LastUpdatedAfter: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:36:44.3345758Z"); return t }()), - LastUpdatedBefore: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:49:48.3686473Z"); return t }()), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.ActivityRunsQueryResponse = armdatafactory.ActivityRunsQueryResponse{ - // Value: []*armdatafactory.ActivityRun{ - // { - // AdditionalProperties: map[string]any{ - // "retryAttempt": nil, - // "userProperties": map[string]any{ - // }, - // }, - // ActivityName: to.Ptr("ExampleForeachActivity"), - // ActivityRunEnd: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:38:11.5445431Z"); return t}()), - // ActivityRunID: to.Ptr("f30c5514-fb85-43ed-9fa4-768d42e58680"), - // ActivityRunStart: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:37:49.4804925Z"); return t}()), - // ActivityType: to.Ptr("ForEach"), - // DurationInMs: to.Ptr[int32](22064), - // Error: map[string]any{ - // "errorCode": "", - // "failureType": "", - // "message": "", - // "target": "ExampleForeachActivity", - // }, - // Input: map[string]any{ - // }, - // LinkedServiceName: to.Ptr(""), - // Output: map[string]any{ - // }, - // PipelineName: to.Ptr("examplePipeline"), - // PipelineRunID: to.Ptr("2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"), - // Status: to.Ptr("Succeeded"), - // }, - // { - // AdditionalProperties: map[string]any{ - // "retryAttempt": nil, - // "userProperties": map[string]any{ - // }, - // }, - // ActivityName: to.Ptr("ExampleCopyActivity"), - // ActivityRunEnd: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:38:07.4188923Z"); return t}()), - // ActivityRunID: to.Ptr("a96678c8-7167-4f00-b629-afccfbad4e51"), - // ActivityRunStart: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:37:50.2460952Z"); return t}()), - // ActivityType: to.Ptr("Copy"), - // DurationInMs: to.Ptr[int32](17172), - // Error: map[string]any{ - // "errorCode": "", - // "failureType": "", - // "message": 
"", - // "target": "ExampleCopyActivity", - // }, - // Input: map[string]any{ - // "dataIntegrationUnits": float64(32), - // "sink":map[string]any{ - // "type": "BlobSink", - // }, - // "source":map[string]any{ - // "type": "BlobSource", - // }, - // }, - // LinkedServiceName: to.Ptr(""), - // Output: map[string]any{ - // "copyDuration": float64(6), - // "dataRead": float64(142000), - // "dataWritten": float64(142000), - // "effectiveIntegrationRuntime": "DefaultIntegrationRuntime (East US)", - // "errors":[]any{ - // }, - // "executionDetails":[]any{ - // map[string]any{ - // "detailedDurations":map[string]any{ - // "queuingDuration": float64(4), - // "transferDuration": float64(2), - // }, - // "duration": float64(6), - // "sink":map[string]any{ - // "type": "AzureBlob", - // }, - // "source":map[string]any{ - // "type": "AzureBlob", - // }, - // "start": "2018-06-16T00:37:50.68834Z", - // "status": "Succeeded", - // "usedCloudDataMovementUnits": float64(4), - // "usedParallelCopies": float64(1), - // }, - // }, - // "filesRead": float64(1), - // "filesWritten": float64(1), - // "throughput": float64(23.112), - // "usedCloudDataMovementUnits": float64(4), - // "usedParallelCopies": float64(1), - // }, - // PipelineName: to.Ptr("examplePipeline"), - // PipelineRunID: to.Ptr("2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"), - // Status: to.Ptr("Succeeded"), - // }}, - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/autorest.md b/sdk/resourcemanager/datafactory/armdatafactory/autorest.md index b49391439cbb..85bfdc443a24 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/autorest.md +++ b/sdk/resourcemanager/datafactory/armdatafactory/autorest.md @@ -5,9 +5,9 @@ ``` yaml azure-arm: true require: -- https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/readme.md -- https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/readme.go.md +- /mnt/vss/_work/1/s/azure-rest-api-specs/specification/datafactory/resource-manager/readme.md +- /mnt/vss/_work/1/s/azure-rest-api-specs/specification/datafactory/resource-manager/readme.go.md license-header: MICROSOFT_MIT_NO_VERSION -module-version: 3.2.1 +module-version: 4.0.0 ``` \ No newline at end of file diff --git a/sdk/resourcemanager/datafactory/armdatafactory/changedatacapture_client.go b/sdk/resourcemanager/datafactory/armdatafactory/changedatacapture_client.go new file mode 100644 index 000000000000..ff483e947f13 --- /dev/null +++ b/sdk/resourcemanager/datafactory/armdatafactory/changedatacapture_client.go @@ -0,0 +1,487 @@ +//go:build go1.18 +// +build go1.18 + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +package armdatafactory + +import ( + "context" + "errors" + "github.com/Azure/azure-sdk-for-go/sdk/azcore" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy" + "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime" + "net/http" + "net/url" + "strings" +) + +// ChangeDataCaptureClient contains the methods for the ChangeDataCapture group. +// Don't use this type directly, use NewChangeDataCaptureClient() instead. 
+type ChangeDataCaptureClient struct { + internal *arm.Client + subscriptionID string +} + +// NewChangeDataCaptureClient creates a new instance of ChangeDataCaptureClient with the specified values. +// - subscriptionID - The subscription identifier. +// - credential - used to authorize requests. Usually a credential from azidentity. +// - options - pass nil to accept the default values. +func NewChangeDataCaptureClient(subscriptionID string, credential azcore.TokenCredential, options *arm.ClientOptions) (*ChangeDataCaptureClient, error) { + cl, err := arm.NewClient(moduleName+".ChangeDataCaptureClient", moduleVersion, credential, options) + if err != nil { + return nil, err + } + client := &ChangeDataCaptureClient{ + subscriptionID: subscriptionID, + internal: cl, + } + return client, nil +} + +// CreateOrUpdate - Creates or updates a change data capture resource. +// If the operation fails it returns an *azcore.ResponseError type. +// +// Generated from API version 2018-06-01 +// - resourceGroupName - The resource group name. +// - factoryName - The factory name. +// - changeDataCaptureName - The change data capture name. +// - changeDataCapture - Change data capture resource definition. +// - options - ChangeDataCaptureClientCreateOrUpdateOptions contains the optional parameters for the ChangeDataCaptureClient.CreateOrUpdate +// method. +func (client *ChangeDataCaptureClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, changeDataCapture ChangeDataCaptureResource, options *ChangeDataCaptureClientCreateOrUpdateOptions) (ChangeDataCaptureClientCreateOrUpdateResponse, error) { + var err error + req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, changeDataCaptureName, changeDataCapture, options) + if err != nil { + return ChangeDataCaptureClientCreateOrUpdateResponse{}, err + } + httpResp, err := client.internal.Pipeline().Do(req) + if err != nil { + return ChangeDataCaptureClientCreateOrUpdateResponse{}, err + } + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ChangeDataCaptureClientCreateOrUpdateResponse{}, err + } + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err +} + +// createOrUpdateCreateRequest creates the CreateOrUpdate request. 
+func (client *ChangeDataCaptureClient) createOrUpdateCreateRequest(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, changeDataCapture ChangeDataCaptureResource, options *ChangeDataCaptureClientCreateOrUpdateOptions) (*policy.Request, error) { + urlPath := "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}" + if client.subscriptionID == "" { + return nil, errors.New("parameter client.subscriptionID cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subscriptionID)) + if resourceGroupName == "" { + return nil, errors.New("parameter resourceGroupName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{resourceGroupName}", url.PathEscape(resourceGroupName)) + if factoryName == "" { + return nil, errors.New("parameter factoryName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{factoryName}", url.PathEscape(factoryName)) + if changeDataCaptureName == "" { + return nil, errors.New("parameter changeDataCaptureName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{changeDataCaptureName}", url.PathEscape(changeDataCaptureName)) + req, err := runtime.NewRequest(ctx, http.MethodPut, runtime.JoinPaths(client.internal.Endpoint(), urlPath)) + if err != nil { + return nil, err + } + reqQP := req.Raw().URL.Query() + reqQP.Set("api-version", "2018-06-01") + req.Raw().URL.RawQuery = reqQP.Encode() + if options != nil && options.IfMatch != nil { + req.Raw().Header["If-Match"] = []string{*options.IfMatch} + } + req.Raw().Header["Accept"] = []string{"application/json"} + if err := runtime.MarshalAsJSON(req, changeDataCapture); err != nil { + return nil, err + } + return req, nil +} + +// createOrUpdateHandleResponse handles the CreateOrUpdate response. +func (client *ChangeDataCaptureClient) createOrUpdateHandleResponse(resp *http.Response) (ChangeDataCaptureClientCreateOrUpdateResponse, error) { + result := ChangeDataCaptureClientCreateOrUpdateResponse{} + if err := runtime.UnmarshalAsJSON(resp, &result.ChangeDataCaptureResource); err != nil { + return ChangeDataCaptureClientCreateOrUpdateResponse{}, err + } + return result, nil +} + +// Delete - Deletes a change data capture. +// If the operation fails it returns an *azcore.ResponseError type. +// +// Generated from API version 2018-06-01 +// - resourceGroupName - The resource group name. +// - factoryName - The factory name. +// - changeDataCaptureName - The change data capture name. +// - options - ChangeDataCaptureClientDeleteOptions contains the optional parameters for the ChangeDataCaptureClient.Delete +// method. 
+func (client *ChangeDataCaptureClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientDeleteOptions) (ChangeDataCaptureClientDeleteResponse, error) { + var err error + req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, changeDataCaptureName, options) + if err != nil { + return ChangeDataCaptureClientDeleteResponse{}, err + } + httpResp, err := client.internal.Pipeline().Do(req) + if err != nil { + return ChangeDataCaptureClientDeleteResponse{}, err + } + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return ChangeDataCaptureClientDeleteResponse{}, err + } + return ChangeDataCaptureClientDeleteResponse{}, nil +} + +// deleteCreateRequest creates the Delete request. +func (client *ChangeDataCaptureClient) deleteCreateRequest(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientDeleteOptions) (*policy.Request, error) { + urlPath := "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}" + if client.subscriptionID == "" { + return nil, errors.New("parameter client.subscriptionID cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subscriptionID)) + if resourceGroupName == "" { + return nil, errors.New("parameter resourceGroupName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{resourceGroupName}", url.PathEscape(resourceGroupName)) + if factoryName == "" { + return nil, errors.New("parameter factoryName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{factoryName}", url.PathEscape(factoryName)) + if changeDataCaptureName == "" { + return nil, errors.New("parameter changeDataCaptureName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{changeDataCaptureName}", url.PathEscape(changeDataCaptureName)) + req, err := runtime.NewRequest(ctx, http.MethodDelete, runtime.JoinPaths(client.internal.Endpoint(), urlPath)) + if err != nil { + return nil, err + } + reqQP := req.Raw().URL.Query() + reqQP.Set("api-version", "2018-06-01") + req.Raw().URL.RawQuery = reqQP.Encode() + req.Raw().Header["Accept"] = []string{"application/json"} + return req, nil +} + +// Get - Gets a change data capture. +// If the operation fails it returns an *azcore.ResponseError type. +// +// Generated from API version 2018-06-01 +// - resourceGroupName - The resource group name. +// - factoryName - The factory name. +// - changeDataCaptureName - The change data capture name. +// - options - ChangeDataCaptureClientGetOptions contains the optional parameters for the ChangeDataCaptureClient.Get method. 
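// Editorial usage sketch (not part of the generated diff; the resource group, factory and
// change data capture names are placeholders, and ctx is assumed to be an existing context):
//
//	resp, err := client.Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", nil)
//	if err != nil {
//		log.Fatalf("failed to get change data capture: %v", err)
//	}
//	_ = resp.ChangeDataCaptureResource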
+func (client *ChangeDataCaptureClient) Get(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientGetOptions) (ChangeDataCaptureClientGetResponse, error) { + var err error + req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, changeDataCaptureName, options) + if err != nil { + return ChangeDataCaptureClientGetResponse{}, err + } + httpResp, err := client.internal.Pipeline().Do(req) + if err != nil { + return ChangeDataCaptureClientGetResponse{}, err + } + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ChangeDataCaptureClientGetResponse{}, err + } + resp, err := client.getHandleResponse(httpResp) + return resp, err +} + +// getCreateRequest creates the Get request. +func (client *ChangeDataCaptureClient) getCreateRequest(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientGetOptions) (*policy.Request, error) { + urlPath := "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}" + if client.subscriptionID == "" { + return nil, errors.New("parameter client.subscriptionID cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subscriptionID)) + if resourceGroupName == "" { + return nil, errors.New("parameter resourceGroupName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{resourceGroupName}", url.PathEscape(resourceGroupName)) + if factoryName == "" { + return nil, errors.New("parameter factoryName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{factoryName}", url.PathEscape(factoryName)) + if changeDataCaptureName == "" { + return nil, errors.New("parameter changeDataCaptureName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{changeDataCaptureName}", url.PathEscape(changeDataCaptureName)) + req, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.internal.Endpoint(), urlPath)) + if err != nil { + return nil, err + } + reqQP := req.Raw().URL.Query() + reqQP.Set("api-version", "2018-06-01") + req.Raw().URL.RawQuery = reqQP.Encode() + if options != nil && options.IfNoneMatch != nil { + req.Raw().Header["If-None-Match"] = []string{*options.IfNoneMatch} + } + req.Raw().Header["Accept"] = []string{"application/json"} + return req, nil +} + +// getHandleResponse handles the Get response. +func (client *ChangeDataCaptureClient) getHandleResponse(resp *http.Response) (ChangeDataCaptureClientGetResponse, error) { + result := ChangeDataCaptureClientGetResponse{} + if err := runtime.UnmarshalAsJSON(resp, &result.ChangeDataCaptureResource); err != nil { + return ChangeDataCaptureClientGetResponse{}, err + } + return result, nil +} + +// NewListByFactoryPager - Lists all resources of type change data capture. +// +// Generated from API version 2018-06-01 +// - resourceGroupName - The resource group name. +// - factoryName - The factory name. +// - options - ChangeDataCaptureClientListByFactoryOptions contains the optional parameters for the ChangeDataCaptureClient.NewListByFactoryPager +// method. 
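// Editorial usage sketch (not part of the generated diff): draining the pager. This assumes
// the embedded ChangeDataCaptureListResponse carries a Value slice, as ARM list types in this
// module typically do; names are placeholders.
//
//	pager := client.NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil)
//	for pager.More() {
//		page, err := pager.NextPage(ctx)
//		if err != nil {
//			log.Fatalf("failed to advance page: %v", err)
//		}
//		for _, cdc := range page.Value {
//			_ = cdc
//		}
//	}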
+func (client *ChangeDataCaptureClient) NewListByFactoryPager(resourceGroupName string, factoryName string, options *ChangeDataCaptureClientListByFactoryOptions) *runtime.Pager[ChangeDataCaptureClientListByFactoryResponse] { + return runtime.NewPager(runtime.PagingHandler[ChangeDataCaptureClientListByFactoryResponse]{ + More: func(page ChangeDataCaptureClientListByFactoryResponse) bool { + return page.NextLink != nil && len(*page.NextLink) > 0 + }, + Fetcher: func(ctx context.Context, page *ChangeDataCaptureClientListByFactoryResponse) (ChangeDataCaptureClientListByFactoryResponse, error) { + var req *policy.Request + var err error + if page == nil { + req, err = client.listByFactoryCreateRequest(ctx, resourceGroupName, factoryName, options) + } else { + req, err = runtime.NewRequest(ctx, http.MethodGet, *page.NextLink) + } + if err != nil { + return ChangeDataCaptureClientListByFactoryResponse{}, err + } + resp, err := client.internal.Pipeline().Do(req) + if err != nil { + return ChangeDataCaptureClientListByFactoryResponse{}, err + } + if !runtime.HasStatusCode(resp, http.StatusOK) { + return ChangeDataCaptureClientListByFactoryResponse{}, runtime.NewResponseError(resp) + } + return client.listByFactoryHandleResponse(resp) + }, + }) +} + +// listByFactoryCreateRequest creates the ListByFactory request. +func (client *ChangeDataCaptureClient) listByFactoryCreateRequest(ctx context.Context, resourceGroupName string, factoryName string, options *ChangeDataCaptureClientListByFactoryOptions) (*policy.Request, error) { + urlPath := "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs" + if client.subscriptionID == "" { + return nil, errors.New("parameter client.subscriptionID cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subscriptionID)) + if resourceGroupName == "" { + return nil, errors.New("parameter resourceGroupName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{resourceGroupName}", url.PathEscape(resourceGroupName)) + if factoryName == "" { + return nil, errors.New("parameter factoryName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{factoryName}", url.PathEscape(factoryName)) + req, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.internal.Endpoint(), urlPath)) + if err != nil { + return nil, err + } + reqQP := req.Raw().URL.Query() + reqQP.Set("api-version", "2018-06-01") + req.Raw().URL.RawQuery = reqQP.Encode() + req.Raw().Header["Accept"] = []string{"application/json"} + return req, nil +} + +// listByFactoryHandleResponse handles the ListByFactory response. +func (client *ChangeDataCaptureClient) listByFactoryHandleResponse(resp *http.Response) (ChangeDataCaptureClientListByFactoryResponse, error) { + result := ChangeDataCaptureClientListByFactoryResponse{} + if err := runtime.UnmarshalAsJSON(resp, &result.ChangeDataCaptureListResponse); err != nil { + return ChangeDataCaptureClientListByFactoryResponse{}, err + } + return result, nil +} + +// Start - Starts a change data capture. +// If the operation fails it returns an *azcore.ResponseError type. +// +// Generated from API version 2018-06-01 +// - resourceGroupName - The resource group name. +// - factoryName - The factory name. +// - changeDataCaptureName - The change data capture name. +// - options - ChangeDataCaptureClientStartOptions contains the optional parameters for the ChangeDataCaptureClient.Start method. 
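// Editorial usage sketch (not part of the generated diff; names are placeholders):
//
//	if _, err := client.Start(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", nil); err != nil {
//		log.Fatalf("failed to start change data capture: %v", err)
//	}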
+func (client *ChangeDataCaptureClient) Start(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientStartOptions) (ChangeDataCaptureClientStartResponse, error) { + var err error + req, err := client.startCreateRequest(ctx, resourceGroupName, factoryName, changeDataCaptureName, options) + if err != nil { + return ChangeDataCaptureClientStartResponse{}, err + } + httpResp, err := client.internal.Pipeline().Do(req) + if err != nil { + return ChangeDataCaptureClientStartResponse{}, err + } + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ChangeDataCaptureClientStartResponse{}, err + } + return ChangeDataCaptureClientStartResponse{}, nil +} + +// startCreateRequest creates the Start request. +func (client *ChangeDataCaptureClient) startCreateRequest(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientStartOptions) (*policy.Request, error) { + urlPath := "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/start" + if client.subscriptionID == "" { + return nil, errors.New("parameter client.subscriptionID cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subscriptionID)) + if resourceGroupName == "" { + return nil, errors.New("parameter resourceGroupName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{resourceGroupName}", url.PathEscape(resourceGroupName)) + if factoryName == "" { + return nil, errors.New("parameter factoryName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{factoryName}", url.PathEscape(factoryName)) + if changeDataCaptureName == "" { + return nil, errors.New("parameter changeDataCaptureName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{changeDataCaptureName}", url.PathEscape(changeDataCaptureName)) + req, err := runtime.NewRequest(ctx, http.MethodPost, runtime.JoinPaths(client.internal.Endpoint(), urlPath)) + if err != nil { + return nil, err + } + reqQP := req.Raw().URL.Query() + reqQP.Set("api-version", "2018-06-01") + req.Raw().URL.RawQuery = reqQP.Encode() + req.Raw().Header["Accept"] = []string{"application/json"} + return req, nil +} + +// Status - Gets the current status for the change data capture resource. +// If the operation fails it returns an *azcore.ResponseError type. +// +// Generated from API version 2018-06-01 +// - resourceGroupName - The resource group name. +// - factoryName - The factory name. +// - changeDataCaptureName - The change data capture name. +// - options - ChangeDataCaptureClientStatusOptions contains the optional parameters for the ChangeDataCaptureClient.Status +// method. 
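// Editorial usage sketch (not part of the generated diff): the Status response body is
// unmarshaled into the plain Value field, as statusHandleResponse below shows; names are
// placeholders.
//
//	statusResp, err := client.Status(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleChangeDataCapture", nil)
//	if err != nil {
//		log.Fatalf("failed to get change data capture status: %v", err)
//	}
//	_ = statusResp.Value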
+func (client *ChangeDataCaptureClient) Status(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientStatusOptions) (ChangeDataCaptureClientStatusResponse, error) { + var err error + req, err := client.statusCreateRequest(ctx, resourceGroupName, factoryName, changeDataCaptureName, options) + if err != nil { + return ChangeDataCaptureClientStatusResponse{}, err + } + httpResp, err := client.internal.Pipeline().Do(req) + if err != nil { + return ChangeDataCaptureClientStatusResponse{}, err + } + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ChangeDataCaptureClientStatusResponse{}, err + } + resp, err := client.statusHandleResponse(httpResp) + return resp, err +} + +// statusCreateRequest creates the Status request. +func (client *ChangeDataCaptureClient) statusCreateRequest(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientStatusOptions) (*policy.Request, error) { + urlPath := "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/status" + if client.subscriptionID == "" { + return nil, errors.New("parameter client.subscriptionID cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subscriptionID)) + if resourceGroupName == "" { + return nil, errors.New("parameter resourceGroupName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{resourceGroupName}", url.PathEscape(resourceGroupName)) + if factoryName == "" { + return nil, errors.New("parameter factoryName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{factoryName}", url.PathEscape(factoryName)) + if changeDataCaptureName == "" { + return nil, errors.New("parameter changeDataCaptureName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{changeDataCaptureName}", url.PathEscape(changeDataCaptureName)) + req, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.internal.Endpoint(), urlPath)) + if err != nil { + return nil, err + } + reqQP := req.Raw().URL.Query() + reqQP.Set("api-version", "2018-06-01") + req.Raw().URL.RawQuery = reqQP.Encode() + req.Raw().Header["Accept"] = []string{"application/json"} + return req, nil +} + +// statusHandleResponse handles the Status response. +func (client *ChangeDataCaptureClient) statusHandleResponse(resp *http.Response) (ChangeDataCaptureClientStatusResponse, error) { + result := ChangeDataCaptureClientStatusResponse{} + if err := runtime.UnmarshalAsJSON(resp, &result.Value); err != nil { + return ChangeDataCaptureClientStatusResponse{}, err + } + return result, nil +} + +// Stop - Stops a change data capture. +// If the operation fails it returns an *azcore.ResponseError type. +// +// Generated from API version 2018-06-01 +// - resourceGroupName - The resource group name. +// - factoryName - The factory name. +// - changeDataCaptureName - The change data capture name. +// - options - ChangeDataCaptureClientStopOptions contains the optional parameters for the ChangeDataCaptureClient.Stop method. 
+func (client *ChangeDataCaptureClient) Stop(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientStopOptions) (ChangeDataCaptureClientStopResponse, error) { + var err error + req, err := client.stopCreateRequest(ctx, resourceGroupName, factoryName, changeDataCaptureName, options) + if err != nil { + return ChangeDataCaptureClientStopResponse{}, err + } + httpResp, err := client.internal.Pipeline().Do(req) + if err != nil { + return ChangeDataCaptureClientStopResponse{}, err + } + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ChangeDataCaptureClientStopResponse{}, err + } + return ChangeDataCaptureClientStopResponse{}, nil +} + +// stopCreateRequest creates the Stop request. +func (client *ChangeDataCaptureClient) stopCreateRequest(ctx context.Context, resourceGroupName string, factoryName string, changeDataCaptureName string, options *ChangeDataCaptureClientStopOptions) (*policy.Request, error) { + urlPath := "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/adfcdcs/{changeDataCaptureName}/stop" + if client.subscriptionID == "" { + return nil, errors.New("parameter client.subscriptionID cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subscriptionID)) + if resourceGroupName == "" { + return nil, errors.New("parameter resourceGroupName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{resourceGroupName}", url.PathEscape(resourceGroupName)) + if factoryName == "" { + return nil, errors.New("parameter factoryName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{factoryName}", url.PathEscape(factoryName)) + if changeDataCaptureName == "" { + return nil, errors.New("parameter changeDataCaptureName cannot be empty") + } + urlPath = strings.ReplaceAll(urlPath, "{changeDataCaptureName}", url.PathEscape(changeDataCaptureName)) + req, err := runtime.NewRequest(ctx, http.MethodPost, runtime.JoinPaths(client.internal.Endpoint(), urlPath)) + if err != nil { + return nil, err + } + reqQP := req.Raw().URL.Query() + reqQP.Set("api-version", "2018-06-01") + req.Raw().URL.RawQuery = reqQP.Encode() + req.Raw().Header["Accept"] = []string{"application/json"} + return req, nil +} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/client_factory.go b/sdk/resourcemanager/datafactory/armdatafactory/client_factory.go index 608758a4db06..2da40bb5b237 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/client_factory.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/client_factory.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
package armdatafactory @@ -38,93 +37,93 @@ func NewClientFactory(subscriptionID string, credential azcore.TokenCredential, }, nil } -func (c *ClientFactory) NewOperationsClient() *OperationsClient { - subClient, _ := NewOperationsClient(c.credential, c.options) +func (c *ClientFactory) NewActivityRunsClient() *ActivityRunsClient { + subClient, _ := NewActivityRunsClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewFactoriesClient() *FactoriesClient { - subClient, _ := NewFactoriesClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewChangeDataCaptureClient() *ChangeDataCaptureClient { + subClient, _ := NewChangeDataCaptureClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewExposureControlClient() *ExposureControlClient { - subClient, _ := NewExposureControlClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewCredentialOperationsClient() *CredentialOperationsClient { + subClient, _ := NewCredentialOperationsClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewIntegrationRuntimesClient() *IntegrationRuntimesClient { - subClient, _ := NewIntegrationRuntimesClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewDataFlowDebugSessionClient() *DataFlowDebugSessionClient { + subClient, _ := NewDataFlowDebugSessionClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewIntegrationRuntimeObjectMetadataClient() *IntegrationRuntimeObjectMetadataClient { - subClient, _ := NewIntegrationRuntimeObjectMetadataClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewDataFlowsClient() *DataFlowsClient { + subClient, _ := NewDataFlowsClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewIntegrationRuntimeNodesClient() *IntegrationRuntimeNodesClient { - subClient, _ := NewIntegrationRuntimeNodesClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewDatasetsClient() *DatasetsClient { + subClient, _ := NewDatasetsClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewLinkedServicesClient() *LinkedServicesClient { - subClient, _ := NewLinkedServicesClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewExposureControlClient() *ExposureControlClient { + subClient, _ := NewExposureControlClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewDatasetsClient() *DatasetsClient { - subClient, _ := NewDatasetsClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewFactoriesClient() *FactoriesClient { + subClient, _ := NewFactoriesClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewPipelinesClient() *PipelinesClient { - subClient, _ := NewPipelinesClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewGlobalParametersClient() *GlobalParametersClient { + subClient, _ := NewGlobalParametersClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewPipelineRunsClient() *PipelineRunsClient { - subClient, _ := NewPipelineRunsClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewIntegrationRuntimeNodesClient() *IntegrationRuntimeNodesClient { + subClient, _ := NewIntegrationRuntimeNodesClient(c.subscriptionID, c.credential, c.options) return 
subClient } -func (c *ClientFactory) NewActivityRunsClient() *ActivityRunsClient { - subClient, _ := NewActivityRunsClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewIntegrationRuntimeObjectMetadataClient() *IntegrationRuntimeObjectMetadataClient { + subClient, _ := NewIntegrationRuntimeObjectMetadataClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewTriggersClient() *TriggersClient { - subClient, _ := NewTriggersClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewIntegrationRuntimesClient() *IntegrationRuntimesClient { + subClient, _ := NewIntegrationRuntimesClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewTriggerRunsClient() *TriggerRunsClient { - subClient, _ := NewTriggerRunsClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewLinkedServicesClient() *LinkedServicesClient { + subClient, _ := NewLinkedServicesClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewDataFlowsClient() *DataFlowsClient { - subClient, _ := NewDataFlowsClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewManagedPrivateEndpointsClient() *ManagedPrivateEndpointsClient { + subClient, _ := NewManagedPrivateEndpointsClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewDataFlowDebugSessionClient() *DataFlowDebugSessionClient { - subClient, _ := NewDataFlowDebugSessionClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewManagedVirtualNetworksClient() *ManagedVirtualNetworksClient { + subClient, _ := NewManagedVirtualNetworksClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewManagedVirtualNetworksClient() *ManagedVirtualNetworksClient { - subClient, _ := NewManagedVirtualNetworksClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewOperationsClient() *OperationsClient { + subClient, _ := NewOperationsClient(c.credential, c.options) return subClient } -func (c *ClientFactory) NewManagedPrivateEndpointsClient() *ManagedPrivateEndpointsClient { - subClient, _ := NewManagedPrivateEndpointsClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewPipelineRunsClient() *PipelineRunsClient { + subClient, _ := NewPipelineRunsClient(c.subscriptionID, c.credential, c.options) return subClient } -func (c *ClientFactory) NewCredentialOperationsClient() *CredentialOperationsClient { - subClient, _ := NewCredentialOperationsClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewPipelinesClient() *PipelinesClient { + subClient, _ := NewPipelinesClient(c.subscriptionID, c.credential, c.options) return subClient } @@ -143,7 +142,12 @@ func (c *ClientFactory) NewPrivateLinkResourcesClient() *PrivateLinkResourcesCli return subClient } -func (c *ClientFactory) NewGlobalParametersClient() *GlobalParametersClient { - subClient, _ := NewGlobalParametersClient(c.subscriptionID, c.credential, c.options) +func (c *ClientFactory) NewTriggerRunsClient() *TriggerRunsClient { + subClient, _ := NewTriggerRunsClient(c.subscriptionID, c.credential, c.options) + return subClient +} + +func (c *ClientFactory) NewTriggersClient() *TriggersClient { + subClient, _ := NewTriggersClient(c.subscriptionID, c.credential, c.options) return subClient } diff --git a/sdk/resourcemanager/datafactory/armdatafactory/constants.go 
b/sdk/resourcemanager/datafactory/armdatafactory/constants.go index e2253f4bf73f..76733ad9831c 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/constants.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/constants.go @@ -3,17 +3,51 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory const ( moduleName = "armdatafactory" - moduleVersion = "v3.2.1" + moduleVersion = "v4.0.0" ) +// ActivityOnInactiveMarkAs - Status result of the activity when the state is set to Inactive. This is an optional property +// and if not provided when the activity is inactive, the status will be Succeeded by default. +type ActivityOnInactiveMarkAs string + +const ( + ActivityOnInactiveMarkAsFailed ActivityOnInactiveMarkAs = "Failed" + ActivityOnInactiveMarkAsSkipped ActivityOnInactiveMarkAs = "Skipped" + ActivityOnInactiveMarkAsSucceeded ActivityOnInactiveMarkAs = "Succeeded" +) + +// PossibleActivityOnInactiveMarkAsValues returns the possible values for the ActivityOnInactiveMarkAs const type. +func PossibleActivityOnInactiveMarkAsValues() []ActivityOnInactiveMarkAs { + return []ActivityOnInactiveMarkAs{ + ActivityOnInactiveMarkAsFailed, + ActivityOnInactiveMarkAsSkipped, + ActivityOnInactiveMarkAsSucceeded, + } +} + +// ActivityState - Activity state. This is an optional property and if not provided, the state will be Active by default. +type ActivityState string + +const ( + ActivityStateActive ActivityState = "Active" + ActivityStateInactive ActivityState = "Inactive" +) + +// PossibleActivityStateValues returns the possible values for the ActivityState const type. +func PossibleActivityStateValues() []ActivityState { + return []ActivityState{ + ActivityStateActive, + ActivityStateInactive, + } +} + // AzureFunctionActivityMethod - The list of HTTP methods supported by a AzureFunctionActivity. type AzureFunctionActivityMethod string @@ -160,8 +194,21 @@ func PossibleConfigurationTypeValues() []ConfigurationType { } } -// CosmosDbConnectionMode - The connection mode used to access CosmosDB account. Type: string (or Expression with resultType -// string). +// ConnectionType - Type of connection via linked service or dataset. +type ConnectionType string + +const ( + ConnectionTypeLinkedservicetype ConnectionType = "linkedservicetype" +) + +// PossibleConnectionTypeValues returns the possible values for the ConnectionType const type. +func PossibleConnectionTypeValues() []ConnectionType { + return []ConnectionType{ + ConnectionTypeLinkedservicetype, + } +} + +// CosmosDbConnectionMode - The connection mode used to access CosmosDB account. Type: string. type CosmosDbConnectionMode string const ( @@ -177,24 +224,6 @@ func PossibleCosmosDbConnectionModeValues() []CosmosDbConnectionMode { } } -// CosmosDbServicePrincipalCredentialType - The service principal credential type to use in Server-To-Server authentication. -// 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with -// resultType string). 
-type CosmosDbServicePrincipalCredentialType string - -const ( - CosmosDbServicePrincipalCredentialTypeServicePrincipalCert CosmosDbServicePrincipalCredentialType = "ServicePrincipalCert" - CosmosDbServicePrincipalCredentialTypeServicePrincipalKey CosmosDbServicePrincipalCredentialType = "ServicePrincipalKey" -) - -// PossibleCosmosDbServicePrincipalCredentialTypeValues returns the possible values for the CosmosDbServicePrincipalCredentialType const type. -func PossibleCosmosDbServicePrincipalCredentialTypeValues() []CosmosDbServicePrincipalCredentialType { - return []CosmosDbServicePrincipalCredentialType{ - CosmosDbServicePrincipalCredentialTypeServicePrincipalCert, - CosmosDbServicePrincipalCredentialTypeServicePrincipalKey, - } -} - // CredentialReferenceType - Credential reference type. type CredentialReferenceType string @@ -277,50 +306,50 @@ func PossibleDatasetReferenceTypeValues() []DatasetReferenceType { type DayOfWeek string const ( - DayOfWeekSunday DayOfWeek = "Sunday" + DayOfWeekFriday DayOfWeek = "Friday" DayOfWeekMonday DayOfWeek = "Monday" + DayOfWeekSaturday DayOfWeek = "Saturday" + DayOfWeekSunday DayOfWeek = "Sunday" + DayOfWeekThursday DayOfWeek = "Thursday" DayOfWeekTuesday DayOfWeek = "Tuesday" DayOfWeekWednesday DayOfWeek = "Wednesday" - DayOfWeekThursday DayOfWeek = "Thursday" - DayOfWeekFriday DayOfWeek = "Friday" - DayOfWeekSaturday DayOfWeek = "Saturday" ) // PossibleDayOfWeekValues returns the possible values for the DayOfWeek const type. func PossibleDayOfWeekValues() []DayOfWeek { return []DayOfWeek{ - DayOfWeekSunday, + DayOfWeekFriday, DayOfWeekMonday, + DayOfWeekSaturday, + DayOfWeekSunday, + DayOfWeekThursday, DayOfWeekTuesday, DayOfWeekWednesday, - DayOfWeekThursday, - DayOfWeekFriday, - DayOfWeekSaturday, } } type DaysOfWeek string const ( - DaysOfWeekSunday DaysOfWeek = "Sunday" + DaysOfWeekFriday DaysOfWeek = "Friday" DaysOfWeekMonday DaysOfWeek = "Monday" + DaysOfWeekSaturday DaysOfWeek = "Saturday" + DaysOfWeekSunday DaysOfWeek = "Sunday" + DaysOfWeekThursday DaysOfWeek = "Thursday" DaysOfWeekTuesday DaysOfWeek = "Tuesday" DaysOfWeekWednesday DaysOfWeek = "Wednesday" - DaysOfWeekThursday DaysOfWeek = "Thursday" - DaysOfWeekFriday DaysOfWeek = "Friday" - DaysOfWeekSaturday DaysOfWeek = "Saturday" ) // PossibleDaysOfWeekValues returns the possible values for the DaysOfWeek const type. func PossibleDaysOfWeekValues() []DaysOfWeek { return []DaysOfWeek{ - DaysOfWeekSunday, + DaysOfWeekFriday, DaysOfWeekMonday, + DaysOfWeekSaturday, + DaysOfWeekSunday, + DaysOfWeekThursday, DaysOfWeekTuesday, DaysOfWeekWednesday, - DaysOfWeekThursday, - DaysOfWeekFriday, - DaysOfWeekSaturday, } } @@ -425,6 +454,24 @@ func PossibleFactoryIdentityTypeValues() []FactoryIdentityType { } } +// FrequencyType - Frequency of period in terms of 'Hour', 'Minute' or 'Second'. +type FrequencyType string + +const ( + FrequencyTypeHour FrequencyType = "Hour" + FrequencyTypeMinute FrequencyType = "Minute" + FrequencyTypeSecond FrequencyType = "Second" +) + +// PossibleFrequencyTypeValues returns the possible values for the FrequencyType const type. +func PossibleFrequencyTypeValues() []FrequencyType { + return []FrequencyType{ + FrequencyTypeHour, + FrequencyTypeMinute, + FrequencyTypeSecond, + } +} + // FtpAuthenticationType - The authentication type to be used to connect to the FTP server. type FtpAuthenticationType string @@ -877,6 +924,24 @@ func PossibleManagedVirtualNetworkReferenceTypeValues() []ManagedVirtualNetworkR } } +// MappingType - Type of the CDC attribute mapping. 
Note: 'Advanced' mapping type is also saved as 'Derived'. +type MappingType string + +const ( + MappingTypeAggregate MappingType = "Aggregate" + MappingTypeDerived MappingType = "Derived" + MappingTypeDirect MappingType = "Direct" +) + +// PossibleMappingTypeValues returns the possible values for the MappingType const type. +func PossibleMappingTypeValues() []MappingType { + return []MappingType{ + MappingTypeAggregate, + MappingTypeDerived, + MappingTypeDirect, + } +} + // MongoDbAuthenticationType - The authentication type to be used to connect to the MongoDB database. type MongoDbAuthenticationType string @@ -1225,8 +1290,7 @@ func PossibleRunQueryOrderByFieldValues() []RunQueryOrderByField { } } -// SQLAlwaysEncryptedAkvAuthType - Sql always encrypted AKV authentication type. Type: string (or Expression with resultType -// string). +// SQLAlwaysEncryptedAkvAuthType - Sql always encrypted AKV authentication type. Type: string. type SQLAlwaysEncryptedAkvAuthType string const ( @@ -1260,22 +1324,6 @@ func PossibleSalesforceSinkWriteBehaviorValues() []SalesforceSinkWriteBehavior { } } -// SalesforceSourceReadBehavior - The read behavior for the operation. Default is Query. -type SalesforceSourceReadBehavior string - -const ( - SalesforceSourceReadBehaviorQuery SalesforceSourceReadBehavior = "Query" - SalesforceSourceReadBehaviorQueryAll SalesforceSourceReadBehavior = "QueryAll" -) - -// PossibleSalesforceSourceReadBehaviorValues returns the possible values for the SalesforceSourceReadBehavior const type. -func PossibleSalesforceSourceReadBehaviorValues() []SalesforceSourceReadBehavior { - return []SalesforceSourceReadBehavior{ - SalesforceSourceReadBehaviorQuery, - SalesforceSourceReadBehaviorQueryAll, - } -} - // SapCloudForCustomerSinkWriteBehavior - The write behavior for the operation. Default is 'Insert'. type SapCloudForCustomerSinkWriteBehavior string diff --git a/sdk/resourcemanager/datafactory/armdatafactory/credentialoperations_client.go b/sdk/resourcemanager/datafactory/armdatafactory/credentialoperations_client.go index 916de85666f8..a4e200f792a1 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/credentialoperations_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/credentialoperations_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -55,18 +54,21 @@ func NewCredentialOperationsClient(subscriptionID string, credential azcore.Toke // - options - CredentialOperationsClientCreateOrUpdateOptions contains the optional parameters for the CredentialOperationsClient.CreateOrUpdate // method. 
func (client *CredentialOperationsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, credentialName string, credential ManagedIdentityCredentialResource, options *CredentialOperationsClientCreateOrUpdateOptions) (CredentialOperationsClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, credentialName, credential, options) if err != nil { return CredentialOperationsClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return CredentialOperationsClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return CredentialOperationsClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return CredentialOperationsClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -99,7 +101,10 @@ func (client *CredentialOperationsClient) createOrUpdateCreateRequest(ctx contex req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, credential) + if err := runtime.MarshalAsJSON(req, credential); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -121,16 +126,18 @@ func (client *CredentialOperationsClient) createOrUpdateHandleResponse(resp *htt // - options - CredentialOperationsClientDeleteOptions contains the optional parameters for the CredentialOperationsClient.Delete // method. func (client *CredentialOperationsClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, credentialName string, options *CredentialOperationsClientDeleteOptions) (CredentialOperationsClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, credentialName, options) if err != nil { return CredentialOperationsClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return CredentialOperationsClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return CredentialOperationsClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return CredentialOperationsClientDeleteResponse{}, err } return CredentialOperationsClientDeleteResponse{}, nil } @@ -175,18 +182,21 @@ func (client *CredentialOperationsClient) deleteCreateRequest(ctx context.Contex // - options - CredentialOperationsClientGetOptions contains the optional parameters for the CredentialOperationsClient.Get // method. 
func (client *CredentialOperationsClient) Get(ctx context.Context, resourceGroupName string, factoryName string, credentialName string, options *CredentialOperationsClientGetOptions) (CredentialOperationsClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, credentialName, options) if err != nil { return CredentialOperationsClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return CredentialOperationsClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNotModified) { - return CredentialOperationsClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNotModified) { + err = runtime.NewResponseError(httpResp) + return CredentialOperationsClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/credentialoperations_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/credentialoperations_client_example_test.go deleted file mode 100644 index 29b3d2208504..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/credentialoperations_client_example_test.go +++ /dev/null @@ -1,149 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_ListByFactory.json -func ExampleCredentialOperationsClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewCredentialOperationsClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // page.CredentialListResponse = armdatafactory.CredentialListResponse{ - // Value: []*armdatafactory.ManagedIdentityCredentialResource{ - // { - // Name: to.Ptr("exampleLinkedService"), - // Type: to.Ptr("Microsoft.DataFactory/factories/credentials"), - // Etag: to.Ptr("0a0064d4-0000-0000-0000-5b245bd00000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/credentials/exampleCredential"), - // Properties: &armdatafactory.ManagedIdentityCredential{ - // Type: to.Ptr("ManagedIdentity"), - // Description: to.Ptr("Example description"), - // TypeProperties: &armdatafactory.ManagedIdentityTypeProperties{ - // ResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami"), - // }, - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Create.json -func ExampleCredentialOperationsClient_CreateOrUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewCredentialOperationsClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleCredential", armdatafactory.ManagedIdentityCredentialResource{ - Properties: &armdatafactory.ManagedIdentityCredential{ - Type: to.Ptr("ManagedIdentity"), - TypeProperties: &armdatafactory.ManagedIdentityTypeProperties{ - ResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami"), - }, - }, - }, &armdatafactory.CredentialOperationsClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.ManagedIdentityCredentialResource = armdatafactory.ManagedIdentityCredentialResource{ - // Name: to.Ptr("exampleCredential"), - // Type: to.Ptr("Microsoft.DataFactory/factories/credentials"), - // Etag: to.Ptr("0a0062d4-0000-0000-0000-5b245bcf0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/credentials/exampleCredential"), - // Properties: &armdatafactory.ManagedIdentityCredential{ - // Type: to.Ptr("ManagedIdentity"), - // TypeProperties: &armdatafactory.ManagedIdentityTypeProperties{ - // ResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami"), - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Get.json -func ExampleCredentialOperationsClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewCredentialOperationsClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleCredential", &armdatafactory.CredentialOperationsClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.ManagedIdentityCredentialResource = armdatafactory.ManagedIdentityCredentialResource{ - // Name: to.Ptr("exampleLinkedService"), - // Type: to.Ptr("Microsoft.DataFactory/factories/credentials"), - // Etag: to.Ptr("1500474f-0000-0200-0000-5cbe090d0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/credentials/exampleCredential"), - // Properties: &armdatafactory.ManagedIdentityCredential{ - // Type: to.Ptr("ManagedIdentity"), - // Description: to.Ptr("Example description"), - // TypeProperties: &armdatafactory.ManagedIdentityTypeProperties{ - // ResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourcegroups/exampleResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami"), - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Credentials_Delete.json -func ExampleCredentialOperationsClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewCredentialOperationsClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleCredential", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/datafactory_live_test.go b/sdk/resourcemanager/datafactory/armdatafactory/datafactory_live_test.go index 693a9a868486..c337bb1972b6 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/datafactory_live_test.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/datafactory_live_test.go @@ -16,7 +16,7 @@ import ( "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" "github.com/Azure/azure-sdk-for-go/sdk/internal/recording" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" + "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v4" "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal/testutil" "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources" "github.com/stretchr/testify/suite" diff --git a/sdk/resourcemanager/datafactory/armdatafactory/dataflowdebugsession_client.go b/sdk/resourcemanager/datafactory/armdatafactory/dataflowdebugsession_client.go index 840452421ef1..85534fa59a64 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/dataflowdebugsession_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/dataflowdebugsession_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
package armdatafactory @@ -54,18 +53,21 @@ func NewDataFlowDebugSessionClient(subscriptionID string, credential azcore.Toke // - options - DataFlowDebugSessionClientAddDataFlowOptions contains the optional parameters for the DataFlowDebugSessionClient.AddDataFlow // method. func (client *DataFlowDebugSessionClient) AddDataFlow(ctx context.Context, resourceGroupName string, factoryName string, request DataFlowDebugPackage, options *DataFlowDebugSessionClientAddDataFlowOptions) (DataFlowDebugSessionClientAddDataFlowResponse, error) { + var err error req, err := client.addDataFlowCreateRequest(ctx, resourceGroupName, factoryName, request, options) if err != nil { return DataFlowDebugSessionClientAddDataFlowResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return DataFlowDebugSessionClientAddDataFlowResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return DataFlowDebugSessionClientAddDataFlowResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return DataFlowDebugSessionClientAddDataFlowResponse{}, err } - return client.addDataFlowHandleResponse(resp) + resp, err := client.addDataFlowHandleResponse(httpResp) + return resp, err } // addDataFlowCreateRequest creates the AddDataFlow request. @@ -91,7 +93,10 @@ func (client *DataFlowDebugSessionClient) addDataFlowCreateRequest(ctx context.C reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, request) + if err := runtime.MarshalAsJSON(req, request); err != nil { + return nil, err + } + return req, nil } // addDataFlowHandleResponse handles the AddDataFlow response. @@ -118,7 +123,8 @@ func (client *DataFlowDebugSessionClient) BeginCreate(ctx context.Context, resou if err != nil { return nil, err } - return runtime.NewPoller[DataFlowDebugSessionClientCreateResponse](resp, client.internal.Pipeline(), nil) + poller, err := runtime.NewPoller[DataFlowDebugSessionClientCreateResponse](resp, client.internal.Pipeline(), nil) + return poller, err } else { return runtime.NewPollerFromResumeToken[DataFlowDebugSessionClientCreateResponse](options.ResumeToken, client.internal.Pipeline(), nil) } @@ -129,18 +135,20 @@ func (client *DataFlowDebugSessionClient) BeginCreate(ctx context.Context, resou // // Generated from API version 2018-06-01 func (client *DataFlowDebugSessionClient) create(ctx context.Context, resourceGroupName string, factoryName string, request CreateDataFlowDebugSessionRequest, options *DataFlowDebugSessionClientBeginCreateOptions) (*http.Response, error) { + var err error req, err := client.createCreateRequest(ctx, resourceGroupName, factoryName, request, options) if err != nil { return nil, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return nil, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusAccepted) { - return nil, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusAccepted) { + err = runtime.NewResponseError(httpResp) + return nil, err } - return resp, nil + return httpResp, nil } // createCreateRequest creates the Create request. 
@@ -166,7 +174,10 @@ func (client *DataFlowDebugSessionClient) createCreateRequest(ctx context.Contex reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, request) + if err := runtime.MarshalAsJSON(req, request); err != nil { + return nil, err + } + return req, nil } // Delete - Deletes a data flow debug session. @@ -179,16 +190,18 @@ func (client *DataFlowDebugSessionClient) createCreateRequest(ctx context.Contex // - options - DataFlowDebugSessionClientDeleteOptions contains the optional parameters for the DataFlowDebugSessionClient.Delete // method. func (client *DataFlowDebugSessionClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, request DeleteDataFlowDebugSessionRequest, options *DataFlowDebugSessionClientDeleteOptions) (DataFlowDebugSessionClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, request, options) if err != nil { return DataFlowDebugSessionClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return DataFlowDebugSessionClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return DataFlowDebugSessionClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return DataFlowDebugSessionClientDeleteResponse{}, err } return DataFlowDebugSessionClientDeleteResponse{}, nil } @@ -216,7 +229,10 @@ func (client *DataFlowDebugSessionClient) deleteCreateRequest(ctx context.Contex reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, request) + if err := runtime.MarshalAsJSON(req, request); err != nil { + return nil, err + } + return req, nil } // BeginExecuteCommand - Execute a data flow debug command. 
@@ -234,7 +250,8 @@ func (client *DataFlowDebugSessionClient) BeginExecuteCommand(ctx context.Contex if err != nil { return nil, err } - return runtime.NewPoller[DataFlowDebugSessionClientExecuteCommandResponse](resp, client.internal.Pipeline(), nil) + poller, err := runtime.NewPoller[DataFlowDebugSessionClientExecuteCommandResponse](resp, client.internal.Pipeline(), nil) + return poller, err } else { return runtime.NewPollerFromResumeToken[DataFlowDebugSessionClientExecuteCommandResponse](options.ResumeToken, client.internal.Pipeline(), nil) } @@ -245,18 +262,20 @@ func (client *DataFlowDebugSessionClient) BeginExecuteCommand(ctx context.Contex // // Generated from API version 2018-06-01 func (client *DataFlowDebugSessionClient) executeCommand(ctx context.Context, resourceGroupName string, factoryName string, request DataFlowDebugCommandRequest, options *DataFlowDebugSessionClientBeginExecuteCommandOptions) (*http.Response, error) { + var err error req, err := client.executeCommandCreateRequest(ctx, resourceGroupName, factoryName, request, options) if err != nil { return nil, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return nil, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusAccepted) { - return nil, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusAccepted) { + err = runtime.NewResponseError(httpResp) + return nil, err } - return resp, nil + return httpResp, nil } // executeCommandCreateRequest creates the ExecuteCommand request. @@ -282,7 +301,10 @@ func (client *DataFlowDebugSessionClient) executeCommandCreateRequest(ctx contex reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, request) + if err := runtime.MarshalAsJSON(req, request); err != nil { + return nil, err + } + return req, nil } // NewQueryByFactoryPager - Query all active data flow debug sessions. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/dataflowdebugsession_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/dataflowdebugsession_client_example_test.go deleted file mode 100644 index 53a6cedb5e99..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/dataflowdebugsession_client_example_test.go +++ /dev/null @@ -1,265 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
- -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_Create.json -func ExampleDataFlowDebugSessionClient_BeginCreate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - poller, err := clientFactory.NewDataFlowDebugSessionClient().BeginCreate(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.CreateDataFlowDebugSessionRequest{ - IntegrationRuntime: &armdatafactory.IntegrationRuntimeDebugResource{ - Name: to.Ptr("ir1"), - Properties: &armdatafactory.ManagedIntegrationRuntime{ - Type: to.Ptr(armdatafactory.IntegrationRuntimeTypeManaged), - TypeProperties: &armdatafactory.ManagedIntegrationRuntimeTypeProperties{ - ComputeProperties: &armdatafactory.IntegrationRuntimeComputeProperties{ - DataFlowProperties: &armdatafactory.IntegrationRuntimeDataFlowProperties{ - ComputeType: to.Ptr(armdatafactory.DataFlowComputeTypeGeneral), - CoreCount: to.Ptr[int32](48), - TimeToLive: to.Ptr[int32](10), - }, - Location: to.Ptr("AutoResolve"), - }, - }, - }, - }, - TimeToLive: to.Ptr[int32](60), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - res, err := poller.PollUntilDone(ctx, nil) - if err != nil { - log.Fatalf("failed to pull the result: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.CreateDataFlowDebugSessionResponse = armdatafactory.CreateDataFlowDebugSessionResponse{ - // SessionID: to.Ptr("229c688c-944c-44ac-b31a-82d50f347154"), - // Status: to.Ptr("Succeeded"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_QueryByFactory.json -func ExampleDataFlowDebugSessionClient_NewQueryByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewDataFlowDebugSessionClient().NewQueryByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. 
- _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.QueryDataFlowDebugSessionsResponse = armdatafactory.QueryDataFlowDebugSessionsResponse{ - // Value: []*armdatafactory.DataFlowDebugSessionInfo{ - // { - // AdditionalProperties: map[string]any{ - // "dataflowName": "DebugSession-0a7e0d6e-f2b7-48cc-8cd8-618326f5662f", - // "userObjectId": "0a7e0d6e-f2b7-48cc-8cd8-618326f5662f", - // }, - // ComputeType: to.Ptr("General"), - // CoreCount: to.Ptr[int32](48), - // LastActivityTime: to.Ptr("2019-09-05T18:28:00.9459674+00:00"), - // SessionID: to.Ptr("229c688c-944c-44ac-b31a-82d50f347154"), - // StartTime: to.Ptr("2019-09-05T18:23:20.3257799+00:00"), - // TimeToLiveInMinutes: to.Ptr[int32](60), - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_AddDataFlow.json -func ExampleDataFlowDebugSessionClient_AddDataFlow() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewDataFlowDebugSessionClient().AddDataFlow(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.DataFlowDebugPackage{ - DataFlow: &armdatafactory.DataFlowDebugResource{ - Name: to.Ptr("dataflow1"), - Properties: &armdatafactory.MappingDataFlow{ - Type: to.Ptr("MappingDataFlow"), - TypeProperties: &armdatafactory.MappingDataFlowTypeProperties{ - Script: to.Ptr("\n\nsource(output(\n Column_1 as string\n ),\n allowSchemaDrift: true,\n validateSchema: false) ~> source1"), - Sinks: []*armdatafactory.DataFlowSink{}, - Sources: []*armdatafactory.DataFlowSource{ - { - Name: to.Ptr("source1"), - Dataset: &armdatafactory.DatasetReference{ - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - ReferenceName: to.Ptr("DelimitedText2"), - }, - }}, - Transformations: []*armdatafactory.Transformation{}, - }, - }, - }, - Datasets: []*armdatafactory.DatasetDebugResource{ - { - Name: to.Ptr("dataset1"), - Properties: &armdatafactory.DelimitedTextDataset{ - Type: to.Ptr("DelimitedText"), - Schema: []any{ - map[string]any{ - "type": "String", - }, - }, - Annotations: []any{}, - LinkedServiceName: &armdatafactory.LinkedServiceReference{ - Type: to.Ptr(armdatafactory.LinkedServiceReferenceTypeLinkedServiceReference), - ReferenceName: to.Ptr("linkedService5"), - }, - TypeProperties: &armdatafactory.DelimitedTextDatasetTypeProperties{ - ColumnDelimiter: ",", - EscapeChar: "\\", - FirstRowAsHeader: true, - Location: &armdatafactory.AzureBlobStorageLocation{ - Type: to.Ptr("AzureBlobStorageLocation"), - FileName: "Ansiencoding.csv", - Container: "dataflow-sample-data", - }, - QuoteChar: "\"", - }, - }, - }}, - DebugSettings: &armdatafactory.DataFlowDebugPackageDebugSettings{ - DatasetParameters: map[string]any{ - "Movies": map[string]any{ - "path": "abc", - }, - "Output": map[string]any{ - "time": "def", - }, - }, - Parameters: map[string]any{ - "sourcePath": "Toy", - }, - SourceSettings: []*armdatafactory.DataFlowSourceSetting{ - { - RowLimit: 
to.Ptr[int32](1000), - SourceName: to.Ptr("source1"), - }, - { - RowLimit: to.Ptr[int32](222), - SourceName: to.Ptr("source2"), - }}, - }, - LinkedServices: []*armdatafactory.LinkedServiceDebugResource{ - { - Name: to.Ptr("linkedService1"), - Properties: &armdatafactory.AzureBlobStorageLinkedService{ - Type: to.Ptr("AzureBlobStorage"), - Annotations: []any{}, - TypeProperties: &armdatafactory.AzureBlobStorageLinkedServiceTypeProperties{ - ConnectionString: "DefaultEndpointsProtocol=https;AccountName=;EndpointSuffix=core.windows.net;", - EncryptedCredential: to.Ptr(""), - }, - }, - }}, - SessionID: to.Ptr("f06ed247-9d07-49b2-b05e-2cb4a2fc871e"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.AddDataFlowToDebugSessionResponse = armdatafactory.AddDataFlowToDebugSessionResponse{ - // JobVersion: to.Ptr("e5328ee7-c524-4207-8ba4-b709010db33d"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_Delete.json -func ExampleDataFlowDebugSessionClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewDataFlowDebugSessionClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.DeleteDataFlowDebugSessionRequest{ - SessionID: to.Ptr("91fb57e0-8292-47be-89ff-c8f2d2bb2a7e"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlowDebugSession_ExecuteCommand.json -func ExampleDataFlowDebugSessionClient_BeginExecuteCommand() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - poller, err := clientFactory.NewDataFlowDebugSessionClient().BeginExecuteCommand(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.DataFlowDebugCommandRequest{ - Command: to.Ptr(armdatafactory.DataFlowDebugCommandTypeExecutePreviewQuery), - CommandPayload: &armdatafactory.DataFlowDebugCommandPayload{ - RowLimits: to.Ptr[int32](100), - StreamName: to.Ptr("source1"), - }, - SessionID: to.Ptr("f06ed247-9d07-49b2-b05e-2cb4a2fc871e"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - res, err := poller.PollUntilDone(ctx, nil) - if err != nil { - log.Fatalf("failed to pull the result: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. 
- _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.DataFlowDebugCommandResponse = armdatafactory.DataFlowDebugCommandResponse{ - // Data: to.Ptr("some output"), - // Status: to.Ptr("Succeeded"), - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/dataflows_client.go b/sdk/resourcemanager/datafactory/armdatafactory/dataflows_client.go index 609d8b5f95ba..65f41dce8e79 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/dataflows_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/dataflows_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -55,18 +54,21 @@ func NewDataFlowsClient(subscriptionID string, credential azcore.TokenCredential // - options - DataFlowsClientCreateOrUpdateOptions contains the optional parameters for the DataFlowsClient.CreateOrUpdate // method. func (client *DataFlowsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, dataFlowName string, dataFlow DataFlowResource, options *DataFlowsClientCreateOrUpdateOptions) (DataFlowsClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, dataFlowName, dataFlow, options) if err != nil { return DataFlowsClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return DataFlowsClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return DataFlowsClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return DataFlowsClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -99,7 +101,10 @@ func (client *DataFlowsClient) createOrUpdateCreateRequest(ctx context.Context, req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, dataFlow) + if err := runtime.MarshalAsJSON(req, dataFlow); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -120,16 +125,18 @@ func (client *DataFlowsClient) createOrUpdateHandleResponse(resp *http.Response) // - dataFlowName - The data flow name. // - options - DataFlowsClientDeleteOptions contains the optional parameters for the DataFlowsClient.Delete method. 
func (client *DataFlowsClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, dataFlowName string, options *DataFlowsClientDeleteOptions) (DataFlowsClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, dataFlowName, options) if err != nil { return DataFlowsClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return DataFlowsClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return DataFlowsClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return DataFlowsClientDeleteResponse{}, err } return DataFlowsClientDeleteResponse{}, nil } @@ -173,18 +180,21 @@ func (client *DataFlowsClient) deleteCreateRequest(ctx context.Context, resource // - dataFlowName - The data flow name. // - options - DataFlowsClientGetOptions contains the optional parameters for the DataFlowsClient.Get method. func (client *DataFlowsClient) Get(ctx context.Context, resourceGroupName string, factoryName string, dataFlowName string, options *DataFlowsClientGetOptions) (DataFlowsClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, dataFlowName, options) if err != nil { return DataFlowsClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return DataFlowsClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return DataFlowsClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return DataFlowsClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/dataflows_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/dataflows_client_example_test.go deleted file mode 100644 index c5495bca27a1..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/dataflows_client_example_test.go +++ /dev/null @@ -1,460 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
- -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Create.json -func ExampleDataFlowsClient_CreateOrUpdate_dataFlowsCreate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewDataFlowsClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", armdatafactory.DataFlowResource{ - Properties: &armdatafactory.MappingDataFlow{ - Type: to.Ptr("MappingDataFlow"), - Description: to.Ptr("Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation."), - TypeProperties: &armdatafactory.MappingDataFlowTypeProperties{ - ScriptLines: []*string{ - to.Ptr("source(output("), - to.Ptr("PreviousConversionRate as double,"), - to.Ptr("Country as string,"), - to.Ptr("DateTime1 as string,"), - to.Ptr("CurrentConversionRate as double"), - to.Ptr("),"), - to.Ptr("allowSchemaDrift: false,"), - to.Ptr("validateSchema: false) ~> USDCurrency"), - to.Ptr("source(output("), - to.Ptr("PreviousConversionRate as double,"), - to.Ptr("Country as string,"), - to.Ptr("DateTime1 as string,"), - to.Ptr("CurrentConversionRate as double"), - to.Ptr("),"), - to.Ptr("allowSchemaDrift: true,"), - to.Ptr("validateSchema: false) ~> CADSource"), - to.Ptr("USDCurrency, CADSource union(byName: true)~> Union"), - to.Ptr("Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn"), - to.Ptr("NewCurrencyColumn split(Country == 'USD',"), - to.Ptr("Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)"), - to.Ptr("ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink"), - to.Ptr("ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink")}, - Sinks: []*armdatafactory.DataFlowSink{ - { - Name: to.Ptr("USDSink"), - Dataset: &armdatafactory.DatasetReference{ - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - ReferenceName: to.Ptr("USDOutput"), - }, - }, - { - Name: to.Ptr("CADSink"), - Dataset: &armdatafactory.DatasetReference{ - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - ReferenceName: to.Ptr("CADOutput"), - }, - }}, - Sources: []*armdatafactory.DataFlowSource{ - { - Name: to.Ptr("USDCurrency"), - Dataset: &armdatafactory.DatasetReference{ - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - ReferenceName: to.Ptr("CurrencyDatasetUSD"), - }, - }, - { - Name: to.Ptr("CADSource"), - Dataset: &armdatafactory.DatasetReference{ - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - ReferenceName: to.Ptr("CurrencyDatasetCAD"), - }, - }}, - }, - }, - }, &armdatafactory.DataFlowsClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. 
- _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.DataFlowResource = armdatafactory.DataFlowResource{ - // Name: to.Ptr("exampleDataFlow"), - // Type: to.Ptr("Microsoft.DataFactory/factories/dataflows"), - // Etag: to.Ptr("0a0066d4-0000-0000-0000-5b245bd20000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/datasets/exampleDataset"), - // Properties: &armdatafactory.MappingDataFlow{ - // Type: to.Ptr("MappingDataFlow"), - // Description: to.Ptr("Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation."), - // TypeProperties: &armdatafactory.MappingDataFlowTypeProperties{ - // ScriptLines: []*string{ - // to.Ptr("source(output("), - // to.Ptr("PreviousConversionRate as double,"), - // to.Ptr("Country as string,"), - // to.Ptr("DateTime1 as string,"), - // to.Ptr("CurrentConversionRate as double"), - // to.Ptr("),"), - // to.Ptr("allowSchemaDrift: false,"), - // to.Ptr("validateSchema: false) ~> USDCurrency"), - // to.Ptr("source(output("), - // to.Ptr("PreviousConversionRate as double,"), - // to.Ptr("Country as string,"), - // to.Ptr("DateTime1 as string,"), - // to.Ptr("CurrentConversionRate as double"), - // to.Ptr("),"), - // to.Ptr("allowSchemaDrift: true,"), - // to.Ptr("validateSchema: false) ~> CADSource"), - // to.Ptr("USDCurrency, CADSource union(byName: true)~> Union"), - // to.Ptr("Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn"), - // to.Ptr("NewCurrencyColumn split(Country == 'USD',"), - // to.Ptr("Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)"), - // to.Ptr("ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink"), - // to.Ptr("ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink")}, - // Sinks: []*armdatafactory.DataFlowSink{ - // { - // Name: to.Ptr("USDSink"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("USDOutput"), - // }, - // }, - // { - // Name: to.Ptr("CADSink"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CADOutput"), - // }, - // }}, - // Sources: []*armdatafactory.DataFlowSource{ - // { - // Name: to.Ptr("USDCurrency"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CurrencyDatasetUSD"), - // }, - // }, - // { - // Name: to.Ptr("CADSource"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CurrencyDatasetCAD"), - // }, - // }}, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Update.json -func ExampleDataFlowsClient_CreateOrUpdate_dataFlowsUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err 
:= armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewDataFlowsClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", armdatafactory.DataFlowResource{ - Properties: &armdatafactory.MappingDataFlow{ - Type: to.Ptr("MappingDataFlow"), - Description: to.Ptr("Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation."), - TypeProperties: &armdatafactory.MappingDataFlowTypeProperties{ - ScriptLines: []*string{ - to.Ptr("source(output("), - to.Ptr("PreviousConversionRate as double,"), - to.Ptr("Country as string,"), - to.Ptr("DateTime1 as string,"), - to.Ptr("CurrentConversionRate as double"), - to.Ptr("),"), - to.Ptr("allowSchemaDrift: false,"), - to.Ptr("validateSchema: false) ~> USDCurrency"), - to.Ptr("source(output("), - to.Ptr("PreviousConversionRate as double,"), - to.Ptr("Country as string,"), - to.Ptr("DateTime1 as string,"), - to.Ptr("CurrentConversionRate as double"), - to.Ptr("),"), - to.Ptr("allowSchemaDrift: true,"), - to.Ptr("validateSchema: false) ~> CADSource"), - to.Ptr("USDCurrency, CADSource union(byName: true)~> Union"), - to.Ptr("Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn"), - to.Ptr("NewCurrencyColumn split(Country == 'USD',"), - to.Ptr("Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)"), - to.Ptr("ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink"), - to.Ptr("ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink")}, - Sinks: []*armdatafactory.DataFlowSink{ - { - Name: to.Ptr("USDSink"), - Dataset: &armdatafactory.DatasetReference{ - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - ReferenceName: to.Ptr("USDOutput"), - }, - }, - { - Name: to.Ptr("CADSink"), - Dataset: &armdatafactory.DatasetReference{ - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - ReferenceName: to.Ptr("CADOutput"), - }, - }}, - Sources: []*armdatafactory.DataFlowSource{ - { - Name: to.Ptr("USDCurrency"), - Dataset: &armdatafactory.DatasetReference{ - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - ReferenceName: to.Ptr("CurrencyDatasetUSD"), - }, - }, - { - Name: to.Ptr("CADSource"), - Dataset: &armdatafactory.DatasetReference{ - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - ReferenceName: to.Ptr("CurrencyDatasetCAD"), - }, - }}, - }, - }, - }, &armdatafactory.DataFlowsClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.DataFlowResource = armdatafactory.DataFlowResource{ - // Name: to.Ptr("exampleDataFlow"), - // Type: to.Ptr("Microsoft.DataFactory/factories/dataflows"), - // Etag: to.Ptr("0a0068d4-0000-0000-0000-5b245bd30002"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/datasets/exampleDataset"), - // Properties: &armdatafactory.MappingDataFlow{ - // Type: to.Ptr("MappingDataFlow"), - // Description: to.Ptr("Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation."), - // TypeProperties: &armdatafactory.MappingDataFlowTypeProperties{ - // ScriptLines: []*string{ - // to.Ptr("source(output("), - // to.Ptr("PreviousConversionRate as double,"), - // to.Ptr("Country as string,"), - // to.Ptr("DateTime1 as string,"), - // to.Ptr("CurrentConversionRate as double"), - // to.Ptr("),"), - // to.Ptr("allowSchemaDrift: false,"), - // to.Ptr("validateSchema: false) ~> USDCurrency"), - // to.Ptr("source(output("), - // to.Ptr("PreviousConversionRate as double,"), - // to.Ptr("Country as string,"), - // to.Ptr("DateTime1 as string,"), - // to.Ptr("CurrentConversionRate as double"), - // to.Ptr("),"), - // to.Ptr("allowSchemaDrift: true,"), - // to.Ptr("validateSchema: false) ~> CADSource"), - // to.Ptr("USDCurrency, CADSource union(byName: true)~> Union"), - // to.Ptr("Union derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn"), - // to.Ptr("NewCurrencyColumn split(Country == 'USD',"), - // to.Ptr("Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)"), - // to.Ptr("ConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink"), - // to.Ptr("ConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink")}, - // Sinks: []*armdatafactory.DataFlowSink{ - // { - // Name: to.Ptr("USDSink"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("USDOutput"), - // }, - // }, - // { - // Name: to.Ptr("CADSink"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CADOutput"), - // }, - // }}, - // Sources: []*armdatafactory.DataFlowSource{ - // { - // Name: to.Ptr("USDCurrency"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CurrencyDatasetUSD"), - // }, - // }, - // { - // Name: to.Ptr("CADSource"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CurrencyDatasetCAD"), - // }, - // }}, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Get.json -func ExampleDataFlowsClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewDataFlowsClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", 
&armdatafactory.DataFlowsClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.DataFlowResource = armdatafactory.DataFlowResource{ - // Name: to.Ptr("exampleDataFlow"), - // Type: to.Ptr("Microsoft.DataFactory/factories/dataflows"), - // Etag: to.Ptr("15004c4f-0000-0200-0000-5cbe090e0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/dataflows/exampleDataFlow"), - // Properties: &armdatafactory.MappingDataFlow{ - // Type: to.Ptr("MappingDataFlow"), - // Description: to.Ptr("Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation."), - // TypeProperties: &armdatafactory.MappingDataFlowTypeProperties{ - // Script: to.Ptr("source(output(PreviousConversionRate as double,Country as string,DateTime1 as string,CurrentConversionRate as double),allowSchemaDrift: false,validateSchema: false) ~> USDCurrency\nsource(output(PreviousConversionRate as double,Country as string,DateTime1 as string,CurrentConversionRate as double),allowSchemaDrift: true,validateSchema: false) ~> CADSource\nUSDCurrency, CADSource union(byName: true)~> Union\nUnion derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn\nNewCurrencyColumn split(Country == 'USD',Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)\nConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink\nConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink"), - // Sinks: []*armdatafactory.DataFlowSink{ - // { - // Name: to.Ptr("USDSink"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("USDOutput"), - // }, - // }, - // { - // Name: to.Ptr("CADSink"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CADOutput"), - // }, - // }}, - // Sources: []*armdatafactory.DataFlowSource{ - // { - // Name: to.Ptr("USDCurrency"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CurrencyDatasetUSD"), - // }, - // }, - // { - // Name: to.Ptr("CADSource"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CurrencyDatasetCAD"), - // }, - // }}, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_Delete.json -func ExampleDataFlowsClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = 
clientFactory.NewDataFlowsClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleDataFlow", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DataFlows_ListByFactory.json -func ExampleDataFlowsClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewDataFlowsClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.DataFlowListResponse = armdatafactory.DataFlowListResponse{ - // Value: []*armdatafactory.DataFlowResource{ - // { - // Name: to.Ptr("exampleDataFlow"), - // Type: to.Ptr("Microsoft.DataFactory/factories/dataflows"), - // Etag: to.Ptr("0a0068d4-0000-0000-0000-5b245bd30000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/dataflows/exampleDataFlow"), - // Properties: &armdatafactory.MappingDataFlow{ - // Type: to.Ptr("MappingDataFlow"), - // Description: to.Ptr("Sample demo data flow to convert currencies showing usage of union, derive and conditional split transformation."), - // TypeProperties: &armdatafactory.MappingDataFlowTypeProperties{ - // Script: to.Ptr("source(output(PreviousConversionRate as double,Country as string,DateTime1 as string,CurrentConversionRate as double),allowSchemaDrift: false,validateSchema: false) ~> USDCurrency\nsource(output(PreviousConversionRate as double,Country as string,DateTime1 as string,CurrentConversionRate as double),allowSchemaDrift: true,validateSchema: false) ~> CADSource\nUSDCurrency, CADSource union(byName: true)~> Union\nUnion derive(NewCurrencyRate = round(CurrentConversionRate*1.25)) ~> NewCurrencyColumn\nNewCurrencyColumn split(Country == 'USD',Country == 'CAD',disjoint: false) ~> ConditionalSplit1@(USD, CAD)\nConditionalSplit1@USD sink(saveMode:'overwrite' ) ~> USDSink\nConditionalSplit1@CAD sink(saveMode:'overwrite' ) ~> CADSink"), - // Sinks: []*armdatafactory.DataFlowSink{ - // { - // Name: to.Ptr("USDSink"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("USDOutput"), - // }, - // }, - // { - // Name: to.Ptr("CADSink"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CADOutput"), - // }, - // }}, - // Sources: []*armdatafactory.DataFlowSource{ - // { - // Name: to.Ptr("USDCurrency"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: 
to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CurrencyDatasetUSD"), - // }, - // }, - // { - // Name: to.Ptr("CADSource"), - // Dataset: &armdatafactory.DatasetReference{ - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // ReferenceName: to.Ptr("CurrencyDatasetCAD"), - // }, - // }}, - // }, - // }, - // }}, - // } - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/datasets_client.go b/sdk/resourcemanager/datafactory/armdatafactory/datasets_client.go index c059c7eab251..9bc8db480df3 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/datasets_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/datasets_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -54,18 +53,21 @@ func NewDatasetsClient(subscriptionID string, credential azcore.TokenCredential, // - dataset - Dataset resource definition. // - options - DatasetsClientCreateOrUpdateOptions contains the optional parameters for the DatasetsClient.CreateOrUpdate method. func (client *DatasetsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, datasetName string, dataset DatasetResource, options *DatasetsClientCreateOrUpdateOptions) (DatasetsClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, datasetName, dataset, options) if err != nil { return DatasetsClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return DatasetsClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return DatasetsClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return DatasetsClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -98,7 +100,10 @@ func (client *DatasetsClient) createOrUpdateCreateRequest(ctx context.Context, r req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, dataset) + if err := runtime.MarshalAsJSON(req, dataset); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -119,16 +124,18 @@ func (client *DatasetsClient) createOrUpdateHandleResponse(resp *http.Response) // - datasetName - The dataset name. // - options - DatasetsClientDeleteOptions contains the optional parameters for the DatasetsClient.Delete method. 
func (client *DatasetsClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, datasetName string, options *DatasetsClientDeleteOptions) (DatasetsClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, datasetName, options) if err != nil { return DatasetsClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return DatasetsClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return DatasetsClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return DatasetsClientDeleteResponse{}, err } return DatasetsClientDeleteResponse{}, nil } @@ -172,18 +179,21 @@ func (client *DatasetsClient) deleteCreateRequest(ctx context.Context, resourceG // - datasetName - The dataset name. // - options - DatasetsClientGetOptions contains the optional parameters for the DatasetsClient.Get method. func (client *DatasetsClient) Get(ctx context.Context, resourceGroupName string, factoryName string, datasetName string, options *DatasetsClientGetOptions) (DatasetsClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, datasetName, options) if err != nil { return DatasetsClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return DatasetsClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNotModified) { - return DatasetsClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNotModified) { + err = runtime.NewResponseError(httpResp) + return DatasetsClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/datasets_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/datasets_client_example_test.go deleted file mode 100644 index 43db79e2c529..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/datasets_client_example_test.go +++ /dev/null @@ -1,334 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
- -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_ListByFactory.json -func ExampleDatasetsClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewDatasetsClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.DatasetListResponse = armdatafactory.DatasetListResponse{ - // Value: []*armdatafactory.DatasetResource{ - // { - // Name: to.Ptr("exampleDataset"), - // Type: to.Ptr("Microsoft.DataFactory/factories/datasets"), - // Etag: to.Ptr("0a0068d4-0000-0000-0000-5b245bd30000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/datasets/exampleDataset"), - // Properties: &armdatafactory.AzureBlobDataset{ - // Type: to.Ptr("AzureBlob"), - // Description: to.Ptr("Example description"), - // LinkedServiceName: &armdatafactory.LinkedServiceReference{ - // Type: to.Ptr(armdatafactory.LinkedServiceReferenceTypeLinkedServiceReference), - // ReferenceName: to.Ptr("exampleLinkedService"), - // }, - // Parameters: map[string]*armdatafactory.ParameterSpecification{ - // "MyFileName": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeString), - // }, - // "MyFolderPath": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeString), - // }, - // }, - // TypeProperties: &armdatafactory.AzureBlobDatasetTypeProperties{ - // Format: &armdatafactory.TextFormat{ - // Type: to.Ptr("TextFormat"), - // }, - // FileName: map[string]any{ - // "type": "Expression", - // "value": "@dataset().MyFileName", - // }, - // FolderPath: map[string]any{ - // "type": "Expression", - // "value": "@dataset().MyFolderPath", - // }, - // }, - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Create.json -func ExampleDatasetsClient_CreateOrUpdate_datasetsCreate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - 
log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewDatasetsClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleDataset", armdatafactory.DatasetResource{ - Properties: &armdatafactory.AzureBlobDataset{ - Type: to.Ptr("AzureBlob"), - LinkedServiceName: &armdatafactory.LinkedServiceReference{ - Type: to.Ptr(armdatafactory.LinkedServiceReferenceTypeLinkedServiceReference), - ReferenceName: to.Ptr("exampleLinkedService"), - }, - Parameters: map[string]*armdatafactory.ParameterSpecification{ - "MyFileName": { - Type: to.Ptr(armdatafactory.ParameterTypeString), - }, - "MyFolderPath": { - Type: to.Ptr(armdatafactory.ParameterTypeString), - }, - }, - TypeProperties: &armdatafactory.AzureBlobDatasetTypeProperties{ - Format: &armdatafactory.TextFormat{ - Type: to.Ptr("TextFormat"), - }, - FileName: map[string]any{ - "type": "Expression", - "value": "@dataset().MyFileName", - }, - FolderPath: map[string]any{ - "type": "Expression", - "value": "@dataset().MyFolderPath", - }, - }, - }, - }, &armdatafactory.DatasetsClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.DatasetResource = armdatafactory.DatasetResource{ - // Name: to.Ptr("exampleDataset"), - // Type: to.Ptr("Microsoft.DataFactory/factories/datasets"), - // Etag: to.Ptr("0a0066d4-0000-0000-0000-5b245bd20000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/datasets/exampleDataset"), - // Properties: &armdatafactory.AzureBlobDataset{ - // Type: to.Ptr("AzureBlob"), - // Schema: []any{ - // map[string]any{ - // "name": "col1", - // "type": "INT_32", - // }, - // map[string]any{ - // "name": "col2", - // "type": "Decimal", - // "precision": "38", - // "scale": "2", - // }, - // }, - // LinkedServiceName: &armdatafactory.LinkedServiceReference{ - // Type: to.Ptr(armdatafactory.LinkedServiceReferenceTypeLinkedServiceReference), - // ReferenceName: to.Ptr("exampleLinkedService"), - // }, - // Parameters: map[string]*armdatafactory.ParameterSpecification{ - // "MyFileName": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeString), - // }, - // "MyFolderPath": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeString), - // }, - // }, - // TypeProperties: &armdatafactory.AzureBlobDatasetTypeProperties{ - // Format: &armdatafactory.TextFormat{ - // Type: to.Ptr("TextFormat"), - // }, - // FileName: map[string]any{ - // "type": "Expression", - // "value": "@dataset().MyFileName", - // }, - // FolderPath: map[string]any{ - // "type": "Expression", - // "value": "@dataset().MyFolderPath", - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Update.json -func ExampleDatasetsClient_CreateOrUpdate_datasetsUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a 
credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewDatasetsClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleDataset", armdatafactory.DatasetResource{ - Properties: &armdatafactory.AzureBlobDataset{ - Type: to.Ptr("AzureBlob"), - Description: to.Ptr("Example description"), - LinkedServiceName: &armdatafactory.LinkedServiceReference{ - Type: to.Ptr(armdatafactory.LinkedServiceReferenceTypeLinkedServiceReference), - ReferenceName: to.Ptr("exampleLinkedService"), - }, - Parameters: map[string]*armdatafactory.ParameterSpecification{ - "MyFileName": { - Type: to.Ptr(armdatafactory.ParameterTypeString), - }, - "MyFolderPath": { - Type: to.Ptr(armdatafactory.ParameterTypeString), - }, - }, - TypeProperties: &armdatafactory.AzureBlobDatasetTypeProperties{ - Format: &armdatafactory.TextFormat{ - Type: to.Ptr("TextFormat"), - }, - FileName: map[string]any{ - "type": "Expression", - "value": "@dataset().MyFileName", - }, - FolderPath: map[string]any{ - "type": "Expression", - "value": "@dataset().MyFolderPath", - }, - }, - }, - }, &armdatafactory.DatasetsClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.DatasetResource = armdatafactory.DatasetResource{ - // Name: to.Ptr("exampleDataset"), - // Type: to.Ptr("Microsoft.DataFactory/factories/datasets"), - // Etag: to.Ptr("0a0068d4-0000-0000-0000-5b245bd30000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/datasets/exampleDataset"), - // Properties: &armdatafactory.AzureBlobDataset{ - // Type: to.Ptr("AzureBlob"), - // Description: to.Ptr("Example description"), - // LinkedServiceName: &armdatafactory.LinkedServiceReference{ - // Type: to.Ptr(armdatafactory.LinkedServiceReferenceTypeLinkedServiceReference), - // ReferenceName: to.Ptr("exampleLinkedService"), - // }, - // Parameters: map[string]*armdatafactory.ParameterSpecification{ - // "MyFileName": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeString), - // }, - // "MyFolderPath": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeString), - // }, - // }, - // TypeProperties: &armdatafactory.AzureBlobDatasetTypeProperties{ - // Format: &armdatafactory.TextFormat{ - // Type: to.Ptr("TextFormat"), - // }, - // FileName: map[string]any{ - // "type": "Expression", - // "value": "@dataset().MyFileName", - // }, - // FolderPath: map[string]any{ - // "type": "Expression", - // "value": "@dataset().MyFolderPath", - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Get.json -func ExampleDatasetsClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a 
credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewDatasetsClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleDataset", &armdatafactory.DatasetsClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.DatasetResource = armdatafactory.DatasetResource{ - // Name: to.Ptr("exampleDataset"), - // Type: to.Ptr("Microsoft.DataFactory/factories/datasets"), - // Etag: to.Ptr("15004c4f-0000-0200-0000-5cbe090e0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/datasets/exampleDataset"), - // Properties: &armdatafactory.AzureBlobDataset{ - // Type: to.Ptr("AzureBlob"), - // Description: to.Ptr("Example description"), - // LinkedServiceName: &armdatafactory.LinkedServiceReference{ - // Type: to.Ptr(armdatafactory.LinkedServiceReferenceTypeLinkedServiceReference), - // ReferenceName: to.Ptr("exampleLinkedService"), - // }, - // Parameters: map[string]*armdatafactory.ParameterSpecification{ - // "MyFileName": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeString), - // }, - // "MyFolderPath": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeString), - // }, - // }, - // TypeProperties: &armdatafactory.AzureBlobDatasetTypeProperties{ - // Format: &armdatafactory.TextFormat{ - // Type: to.Ptr("TextFormat"), - // }, - // FileName: map[string]any{ - // "type": "Expression", - // "value": "@dataset().MyFileName", - // }, - // FolderPath: map[string]any{ - // "type": "Expression", - // "value": "@dataset().MyFolderPath", - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Datasets_Delete.json -func ExampleDatasetsClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewDatasetsClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleDataset", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/exposurecontrol_client.go b/sdk/resourcemanager/datafactory/armdatafactory/exposurecontrol_client.go index b0c6dfd8f1ea..143bd52a9618 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/exposurecontrol_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/exposurecontrol_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. 
-// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -53,18 +52,21 @@ func NewExposureControlClient(subscriptionID string, credential azcore.TokenCred // - options - ExposureControlClientGetFeatureValueOptions contains the optional parameters for the ExposureControlClient.GetFeatureValue // method. func (client *ExposureControlClient) GetFeatureValue(ctx context.Context, locationID string, exposureControlRequest ExposureControlRequest, options *ExposureControlClientGetFeatureValueOptions) (ExposureControlClientGetFeatureValueResponse, error) { + var err error req, err := client.getFeatureValueCreateRequest(ctx, locationID, exposureControlRequest, options) if err != nil { return ExposureControlClientGetFeatureValueResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return ExposureControlClientGetFeatureValueResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return ExposureControlClientGetFeatureValueResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ExposureControlClientGetFeatureValueResponse{}, err } - return client.getFeatureValueHandleResponse(resp) + resp, err := client.getFeatureValueHandleResponse(httpResp) + return resp, err } // getFeatureValueCreateRequest creates the GetFeatureValue request. @@ -86,7 +88,10 @@ func (client *ExposureControlClient) getFeatureValueCreateRequest(ctx context.Co reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, exposureControlRequest) + if err := runtime.MarshalAsJSON(req, exposureControlRequest); err != nil { + return nil, err + } + return req, nil } // getFeatureValueHandleResponse handles the GetFeatureValue response. @@ -108,18 +113,21 @@ func (client *ExposureControlClient) getFeatureValueHandleResponse(resp *http.Re // - options - ExposureControlClientGetFeatureValueByFactoryOptions contains the optional parameters for the ExposureControlClient.GetFeatureValueByFactory // method. 
func (client *ExposureControlClient) GetFeatureValueByFactory(ctx context.Context, resourceGroupName string, factoryName string, exposureControlRequest ExposureControlRequest, options *ExposureControlClientGetFeatureValueByFactoryOptions) (ExposureControlClientGetFeatureValueByFactoryResponse, error) { + var err error req, err := client.getFeatureValueByFactoryCreateRequest(ctx, resourceGroupName, factoryName, exposureControlRequest, options) if err != nil { return ExposureControlClientGetFeatureValueByFactoryResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return ExposureControlClientGetFeatureValueByFactoryResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return ExposureControlClientGetFeatureValueByFactoryResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ExposureControlClientGetFeatureValueByFactoryResponse{}, err } - return client.getFeatureValueByFactoryHandleResponse(resp) + resp, err := client.getFeatureValueByFactoryHandleResponse(httpResp) + return resp, err } // getFeatureValueByFactoryCreateRequest creates the GetFeatureValueByFactory request. @@ -145,7 +153,10 @@ func (client *ExposureControlClient) getFeatureValueByFactoryCreateRequest(ctx c reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, exposureControlRequest) + if err := runtime.MarshalAsJSON(req, exposureControlRequest); err != nil { + return nil, err + } + return req, nil } // getFeatureValueByFactoryHandleResponse handles the GetFeatureValueByFactory response. @@ -167,18 +178,21 @@ func (client *ExposureControlClient) getFeatureValueByFactoryHandleResponse(resp // - options - ExposureControlClientQueryFeatureValuesByFactoryOptions contains the optional parameters for the ExposureControlClient.QueryFeatureValuesByFactory // method. func (client *ExposureControlClient) QueryFeatureValuesByFactory(ctx context.Context, resourceGroupName string, factoryName string, exposureControlBatchRequest ExposureControlBatchRequest, options *ExposureControlClientQueryFeatureValuesByFactoryOptions) (ExposureControlClientQueryFeatureValuesByFactoryResponse, error) { + var err error req, err := client.queryFeatureValuesByFactoryCreateRequest(ctx, resourceGroupName, factoryName, exposureControlBatchRequest, options) if err != nil { return ExposureControlClientQueryFeatureValuesByFactoryResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return ExposureControlClientQueryFeatureValuesByFactoryResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return ExposureControlClientQueryFeatureValuesByFactoryResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ExposureControlClientQueryFeatureValuesByFactoryResponse{}, err } - return client.queryFeatureValuesByFactoryHandleResponse(resp) + resp, err := client.queryFeatureValuesByFactoryHandleResponse(httpResp) + return resp, err } // queryFeatureValuesByFactoryCreateRequest creates the QueryFeatureValuesByFactory request. 
@@ -204,7 +218,10 @@ func (client *ExposureControlClient) queryFeatureValuesByFactoryCreateRequest(ct reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, exposureControlBatchRequest) + if err := runtime.MarshalAsJSON(req, exposureControlBatchRequest); err != nil { + return nil, err + } + return req, nil } // queryFeatureValuesByFactoryHandleResponse handles the QueryFeatureValuesByFactory response. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/exposurecontrol_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/exposurecontrol_client_example_test.go deleted file mode 100644 index afe0bf50db8c..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/exposurecontrol_client_example_test.go +++ /dev/null @@ -1,114 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ExposureControl_GetFeatureValue.json -func ExampleExposureControlClient_GetFeatureValue() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewExposureControlClient().GetFeatureValue(ctx, "WestEurope", armdatafactory.ExposureControlRequest{ - FeatureName: to.Ptr("ADFIntegrationRuntimeSharingRbac"), - FeatureType: to.Ptr("Feature"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.ExposureControlResponse = armdatafactory.ExposureControlResponse{ - // FeatureName: to.Ptr("ADFIntegrationRuntimeSharingRbac"), - // Value: to.Ptr("False"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ExposureControl_GetFeatureValueByFactory.json -func ExampleExposureControlClient_GetFeatureValueByFactory() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewExposureControlClient().GetFeatureValueByFactory(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.ExposureControlRequest{ - FeatureName: to.Ptr("ADFIntegrationRuntimeSharingRbac"), - FeatureType: to.Ptr("Feature"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.ExposureControlResponse = armdatafactory.ExposureControlResponse{ - // FeatureName: to.Ptr("ADFIntegrationRuntimeSharingRbac"), - // Value: to.Ptr("False"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ExposureControl_QueryFeatureValuesByFactory.json -func ExampleExposureControlClient_QueryFeatureValuesByFactory() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewExposureControlClient().QueryFeatureValuesByFactory(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.ExposureControlBatchRequest{ - ExposureControlRequests: []*armdatafactory.ExposureControlRequest{ - { - FeatureName: to.Ptr("ADFIntegrationRuntimeSharingRbac"), - FeatureType: to.Ptr("Feature"), - }, - { - FeatureName: to.Ptr("ADFSampleFeature"), - FeatureType: to.Ptr("Feature"), - }}, - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.ExposureControlBatchResponse = armdatafactory.ExposureControlBatchResponse{ - // ExposureControlResponses: []*armdatafactory.ExposureControlResponse{ - // { - // FeatureName: to.Ptr("ADFIntegrationRuntimeSharingRbac"), - // Value: to.Ptr("False"), - // }, - // { - // FeatureName: to.Ptr("ADFSampleFeature"), - // Value: to.Ptr("True"), - // }}, - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/factories_client.go b/sdk/resourcemanager/datafactory/armdatafactory/factories_client.go index 815c1297c21d..96cf448fa32e 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/factories_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/factories_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -53,18 +52,21 @@ func NewFactoriesClient(subscriptionID string, credential azcore.TokenCredential // - options - FactoriesClientConfigureFactoryRepoOptions contains the optional parameters for the FactoriesClient.ConfigureFactoryRepo // method. func (client *FactoriesClient) ConfigureFactoryRepo(ctx context.Context, locationID string, factoryRepoUpdate FactoryRepoUpdate, options *FactoriesClientConfigureFactoryRepoOptions) (FactoriesClientConfigureFactoryRepoResponse, error) { + var err error req, err := client.configureFactoryRepoCreateRequest(ctx, locationID, factoryRepoUpdate, options) if err != nil { return FactoriesClientConfigureFactoryRepoResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return FactoriesClientConfigureFactoryRepoResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return FactoriesClientConfigureFactoryRepoResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return FactoriesClientConfigureFactoryRepoResponse{}, err } - return client.configureFactoryRepoHandleResponse(resp) + resp, err := client.configureFactoryRepoHandleResponse(httpResp) + return resp, err } // configureFactoryRepoCreateRequest creates the ConfigureFactoryRepo request. @@ -86,7 +88,10 @@ func (client *FactoriesClient) configureFactoryRepoCreateRequest(ctx context.Con reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, factoryRepoUpdate) + if err := runtime.MarshalAsJSON(req, factoryRepoUpdate); err != nil { + return nil, err + } + return req, nil } // configureFactoryRepoHandleResponse handles the ConfigureFactoryRepo response. @@ -108,18 +113,21 @@ func (client *FactoriesClient) configureFactoryRepoHandleResponse(resp *http.Res // - options - FactoriesClientCreateOrUpdateOptions contains the optional parameters for the FactoriesClient.CreateOrUpdate // method. 
func (client *FactoriesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, factory Factory, options *FactoriesClientCreateOrUpdateOptions) (FactoriesClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, factory, options) if err != nil { return FactoriesClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return FactoriesClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return FactoriesClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return FactoriesClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -148,7 +156,10 @@ func (client *FactoriesClient) createOrUpdateCreateRequest(ctx context.Context, req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, factory) + if err := runtime.MarshalAsJSON(req, factory); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -168,16 +179,18 @@ func (client *FactoriesClient) createOrUpdateHandleResponse(resp *http.Response) // - factoryName - The factory name. // - options - FactoriesClientDeleteOptions contains the optional parameters for the FactoriesClient.Delete method. func (client *FactoriesClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, options *FactoriesClientDeleteOptions) (FactoriesClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, options) if err != nil { return FactoriesClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return FactoriesClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return FactoriesClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return FactoriesClientDeleteResponse{}, err } return FactoriesClientDeleteResponse{}, nil } @@ -216,18 +229,21 @@ func (client *FactoriesClient) deleteCreateRequest(ctx context.Context, resource // - factoryName - The factory name. // - options - FactoriesClientGetOptions contains the optional parameters for the FactoriesClient.Get method. 
func (client *FactoriesClient) Get(ctx context.Context, resourceGroupName string, factoryName string, options *FactoriesClientGetOptions) (FactoriesClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, options) if err != nil { return FactoriesClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return FactoriesClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNotModified) { - return FactoriesClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNotModified) { + err = runtime.NewResponseError(httpResp) + return FactoriesClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. @@ -278,18 +294,21 @@ func (client *FactoriesClient) getHandleResponse(resp *http.Response) (Factories // - options - FactoriesClientGetDataPlaneAccessOptions contains the optional parameters for the FactoriesClient.GetDataPlaneAccess // method. func (client *FactoriesClient) GetDataPlaneAccess(ctx context.Context, resourceGroupName string, factoryName string, policy UserAccessPolicy, options *FactoriesClientGetDataPlaneAccessOptions) (FactoriesClientGetDataPlaneAccessResponse, error) { + var err error req, err := client.getDataPlaneAccessCreateRequest(ctx, resourceGroupName, factoryName, policy, options) if err != nil { return FactoriesClientGetDataPlaneAccessResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return FactoriesClientGetDataPlaneAccessResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return FactoriesClientGetDataPlaneAccessResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return FactoriesClientGetDataPlaneAccessResponse{}, err } - return client.getDataPlaneAccessHandleResponse(resp) + resp, err := client.getDataPlaneAccessHandleResponse(httpResp) + return resp, err } // getDataPlaneAccessCreateRequest creates the GetDataPlaneAccess request. @@ -315,7 +334,10 @@ func (client *FactoriesClient) getDataPlaneAccessCreateRequest(ctx context.Conte reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, policy) + if err := runtime.MarshalAsJSON(req, policy); err != nil { + return nil, err + } + return req, nil } // getDataPlaneAccessHandleResponse handles the GetDataPlaneAccess response. @@ -337,18 +359,21 @@ func (client *FactoriesClient) getDataPlaneAccessHandleResponse(resp *http.Respo // - options - FactoriesClientGetGitHubAccessTokenOptions contains the optional parameters for the FactoriesClient.GetGitHubAccessToken // method. 
func (client *FactoriesClient) GetGitHubAccessToken(ctx context.Context, resourceGroupName string, factoryName string, gitHubAccessTokenRequest GitHubAccessTokenRequest, options *FactoriesClientGetGitHubAccessTokenOptions) (FactoriesClientGetGitHubAccessTokenResponse, error) { + var err error req, err := client.getGitHubAccessTokenCreateRequest(ctx, resourceGroupName, factoryName, gitHubAccessTokenRequest, options) if err != nil { return FactoriesClientGetGitHubAccessTokenResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return FactoriesClientGetGitHubAccessTokenResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return FactoriesClientGetGitHubAccessTokenResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return FactoriesClientGetGitHubAccessTokenResponse{}, err } - return client.getGitHubAccessTokenHandleResponse(resp) + resp, err := client.getGitHubAccessTokenHandleResponse(httpResp) + return resp, err } // getGitHubAccessTokenCreateRequest creates the GetGitHubAccessToken request. @@ -374,7 +399,10 @@ func (client *FactoriesClient) getGitHubAccessTokenCreateRequest(ctx context.Con reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, gitHubAccessTokenRequest) + if err := runtime.MarshalAsJSON(req, gitHubAccessTokenRequest); err != nil { + return nil, err + } + return req, nil } // getGitHubAccessTokenHandleResponse handles the GetGitHubAccessToken response. @@ -519,18 +547,21 @@ func (client *FactoriesClient) listByResourceGroupHandleResponse(resp *http.Resp // - factoryUpdateParameters - The parameters for updating a factory. // - options - FactoriesClientUpdateOptions contains the optional parameters for the FactoriesClient.Update method. func (client *FactoriesClient) Update(ctx context.Context, resourceGroupName string, factoryName string, factoryUpdateParameters FactoryUpdateParameters, options *FactoriesClientUpdateOptions) (FactoriesClientUpdateResponse, error) { + var err error req, err := client.updateCreateRequest(ctx, resourceGroupName, factoryName, factoryUpdateParameters, options) if err != nil { return FactoriesClientUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return FactoriesClientUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return FactoriesClientUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return FactoriesClientUpdateResponse{}, err } - return client.updateHandleResponse(resp) + resp, err := client.updateHandleResponse(httpResp) + return resp, err } // updateCreateRequest creates the Update request. @@ -556,7 +587,10 @@ func (client *FactoriesClient) updateCreateRequest(ctx context.Context, resource reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, factoryUpdateParameters) + if err := runtime.MarshalAsJSON(req, factoryUpdateParameters); err != nil { + return nil, err + } + return req, nil } // updateHandleResponse handles the Update response. 
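The refactored factories client methods keep their public signatures; only the internal flow changes (the raw *http.Response is held in httpResp, and runtime.NewResponseError is assigned to err before returning). Because the generated example tests are deleted below, the following is a minimal, hand-written caller sketch, assuming the v4 module path introduced by this patch's go.mod change and placeholder resource names, showing how a non-2xx status still surfaces to the caller as an *azcore.ResponseError:

package main

import (
	"context"
	"errors"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v4"
)

func main() {
	cred, err := azidentity.NewDefaultAzureCredential(nil)
	if err != nil {
		log.Fatalf("failed to obtain a credential: %v", err)
	}
	// "<subscription ID>", "exampleResourceGroup" and "exampleFactoryName" are placeholders.
	client, err := armdatafactory.NewFactoriesClient("<subscription ID>", cred, nil)
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}
	res, err := client.CreateOrUpdate(context.Background(), "exampleResourceGroup", "exampleFactoryName",
		armdatafactory.Factory{
			Location: to.Ptr("East US"),
		}, nil)
	if err != nil {
		// runtime.NewResponseError, now assigned to err before returning, yields an *azcore.ResponseError.
		var respErr *azcore.ResponseError
		if errors.As(err, &respErr) {
			log.Fatalf("service returned HTTP %d (%s)", respErr.StatusCode, respErr.ErrorCode)
		}
		log.Fatalf("request failed: %v", err)
	}
	_ = res.Factory
}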
diff --git a/sdk/resourcemanager/datafactory/armdatafactory/factories_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/factories_client_example_test.go deleted file mode 100644 index d9bf69baaeff..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/factories_client_example_test.go +++ /dev/null @@ -1,592 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_List.json -func ExampleFactoriesClient_NewListPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewFactoriesClient().NewListPager(nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // page.FactoryListResponse = armdatafactory.FactoryListResponse{ - // Value: []*armdatafactory.Factory{ - // { - // Name: to.Ptr("rpV2OrigDF-72c7d3d4-5e17-4ec6-91de-9ab433f15e79"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"0000aa0d-0000-0000-0000-5b0d58170000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/rg-yanzhang-dfv2/providers/Microsoft.DataFactory/factories/rpv2origdf-72c7d3d4-5e17-4ec6-91de-9ab433f15e79"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // }, - // Identity: &armdatafactory.FactoryIdentity{ - // Type: to.Ptr(armdatafactory.FactoryIdentityTypeSystemAssigned), - // PrincipalID: to.Ptr("399c3de2-6072-4326-bfa9-4d0c116f1a7b"), - // TenantID: to.Ptr("12345678-1234-1234-1234-123456789abc"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-05-29T13:39:35.615921Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2017-09-01-preview"), - // }, - // }, - // { - // Name: to.Ptr("df-dogfood-yanzhang-we"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"0000f301-0000-0000-0000-5b21b16c0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/rg-yanzhang-dfv2/providers/Microsoft.DataFactory/factories/df-dogfood-yanzhang-we"), - // Location: to.Ptr("West Europe"), - // Tags: map[string]*string{ - // }, - // Identity: &armdatafactory.FactoryIdentity{ - // Type: to.Ptr(armdatafactory.FactoryIdentityTypeSystemAssigned), - // PrincipalID: to.Ptr("e8dd6df9-bad5-4dea-8fb8-0d13d1845d9e"), - // TenantID: to.Ptr("12345678-1234-1234-1234-123456789abc"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-14T00:06:04.6667461Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2017-09-01-preview"), - // }, - // }, - // { - // Name: to.Ptr("exampleFactoryName-linked"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00008a02-0000-0000-0000-5b237f270000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName-linked"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // }, - // Identity: &armdatafactory.FactoryIdentity{ - // Type: to.Ptr(armdatafactory.FactoryIdentityTypeSystemAssigned), - // PrincipalID: to.Ptr("10743799-44d2-42fe-8c4d-5bc5c51c0684"), - // TenantID: to.Ptr("12345678-1234-1234-1234-123456789abc"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-15T08:56:07.1828318Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2017-09-01-preview"), - // }, - // }, - // { - // Name: to.Ptr("FactoryToUpgrade"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00003d04-0000-0000-0000-5b28962f0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/factorytoupgrade"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, 
"2018-06-19T05:35:35.7133828Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2018-06-01"), - // }, - // }, - // { - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00004004-0000-0000-0000-5b28979e0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // "exampleTag": to.Ptr("exampleValue"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-19T05:41:50.0041314Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // PurviewConfiguration: &armdatafactory.PurviewConfiguration{ - // PurviewResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Purview/accounts/examplePurview"), - // }, - // RepoConfiguration: &armdatafactory.FactoryVSTSConfiguration{ - // Type: to.Ptr("FactoryVSTSConfiguration"), - // AccountName: to.Ptr("ADF"), - // CollaborationBranch: to.Ptr("master"), - // LastCommitID: to.Ptr(""), - // RepositoryName: to.Ptr("repo"), - // RootFolder: to.Ptr("/"), - // ProjectName: to.Ptr("project"), - // TenantID: to.Ptr(""), - // }, - // Version: to.Ptr("2018-06-01"), - // }, - // }, - // { - // Name: to.Ptr("rpV2OrigDF-72c7d3d4-5e17-4ec6-91de-9ab433f15e79"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"0000aa0d-0000-0000-0000-5b0d58170000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/rg-yanzhang-dfv2/providers/Microsoft.DataFactory/factories/rpv2origdf-72c7d3d4-5e17-4ec6-91de-9ab433f15e79"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // }, - // Identity: &armdatafactory.FactoryIdentity{ - // Type: to.Ptr(armdatafactory.FactoryIdentityTypeSystemAssigned), - // PrincipalID: to.Ptr("399c3de2-6072-4326-bfa9-4d0c116f1a7b"), - // TenantID: to.Ptr("12345678-1234-1234-1234-123456789abc"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-05-29T13:39:35.615921Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2017-09-01-preview"), - // }, - // }, - // { - // Name: to.Ptr("df-dogfood-yanzhang-we"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"0000f301-0000-0000-0000-5b21b16c0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/rg-yanzhang-dfv2/providers/Microsoft.DataFactory/factories/df-dogfood-yanzhang-we"), - // Location: to.Ptr("West Europe"), - // Tags: map[string]*string{ - // }, - // Identity: &armdatafactory.FactoryIdentity{ - // Type: to.Ptr(armdatafactory.FactoryIdentityTypeSystemAssigned), - // PrincipalID: to.Ptr("e8dd6df9-bad5-4dea-8fb8-0d13d1845d9e"), - // TenantID: to.Ptr("12345678-1234-1234-1234-123456789abc"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-14T00:06:04.6667461Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2017-09-01-preview"), - // }, - // }, - // { - // Name: to.Ptr("exampleFactoryName-linked"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: 
to.Ptr("\"00008a02-0000-0000-0000-5b237f270000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName-linked"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // }, - // Identity: &armdatafactory.FactoryIdentity{ - // Type: to.Ptr(armdatafactory.FactoryIdentityTypeSystemAssigned), - // PrincipalID: to.Ptr("10743799-44d2-42fe-8c4d-5bc5c51c0684"), - // TenantID: to.Ptr("12345678-1234-1234-1234-123456789abc"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-15T08:56:07.1828318Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2017-09-01-preview"), - // }, - // }, - // { - // Name: to.Ptr("FactoryToUpgrade"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00003d04-0000-0000-0000-5b28962f0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/factorytoupgrade"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-19T05:35:35.7133828Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2018-06-01"), - // }, - // }, - // { - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00004004-0000-0000-0000-5b28979e0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // "exampleTag": to.Ptr("exampleValue"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-19T05:41:50.0041314Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // PurviewConfiguration: &armdatafactory.PurviewConfiguration{ - // PurviewResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Purview/accounts/examplePurview"), - // }, - // RepoConfiguration: &armdatafactory.FactoryVSTSConfiguration{ - // Type: to.Ptr("FactoryVSTSConfiguration"), - // AccountName: to.Ptr("ADF"), - // CollaborationBranch: to.Ptr("master"), - // LastCommitID: to.Ptr(""), - // RepositoryName: to.Ptr("repo"), - // RootFolder: to.Ptr("/"), - // ProjectName: to.Ptr("project"), - // TenantID: to.Ptr(""), - // }, - // Version: to.Ptr("2018-06-01"), - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_ConfigureFactoryRepo.json -func ExampleFactoriesClient_ConfigureFactoryRepo() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := 
clientFactory.NewFactoriesClient().ConfigureFactoryRepo(ctx, "East US", armdatafactory.FactoryRepoUpdate{ - FactoryResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - RepoConfiguration: &armdatafactory.FactoryVSTSConfiguration{ - Type: to.Ptr("FactoryVSTSConfiguration"), - AccountName: to.Ptr("ADF"), - CollaborationBranch: to.Ptr("master"), - LastCommitID: to.Ptr(""), - RepositoryName: to.Ptr("repo"), - RootFolder: to.Ptr("/"), - ProjectName: to.Ptr("project"), - TenantID: to.Ptr(""), - }, - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.Factory = armdatafactory.Factory{ - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00004004-0000-0000-0000-5b28979e0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // "exampleTag": to.Ptr("exampleValue"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-19T05:41:50.0041314Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // RepoConfiguration: &armdatafactory.FactoryVSTSConfiguration{ - // Type: to.Ptr("FactoryVSTSConfiguration"), - // AccountName: to.Ptr("ADF"), - // CollaborationBranch: to.Ptr("master"), - // LastCommitID: to.Ptr(""), - // RepositoryName: to.Ptr("repo"), - // RootFolder: to.Ptr("/"), - // ProjectName: to.Ptr("project"), - // TenantID: to.Ptr(""), - // }, - // Version: to.Ptr("2018-06-01"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_ListByResourceGroup.json -func ExampleFactoriesClient_NewListByResourceGroupPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewFactoriesClient().NewListByResourceGroupPager("exampleResourceGroup", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // page.FactoryListResponse = armdatafactory.FactoryListResponse{ - // Value: []*armdatafactory.Factory{ - // { - // Name: to.Ptr("exampleFactoryName-linked"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00008a02-0000-0000-0000-5b237f270000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName-linked"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // }, - // Identity: &armdatafactory.FactoryIdentity{ - // Type: to.Ptr(armdatafactory.FactoryIdentityTypeSystemAssigned), - // PrincipalID: to.Ptr("10743799-44d2-42fe-8c4d-5bc5c51c0684"), - // TenantID: to.Ptr("12345678-1234-1234-1234-123456789abc"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-15T08:56:07.1828318Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2017-09-01-preview"), - // }, - // }, - // { - // Name: to.Ptr("FactoryToUpgrade"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00003d04-0000-0000-0000-5b28962f0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/factorytoupgrade"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-19T05:35:35.7133828Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2018-06-01"), - // }, - // }, - // { - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00004004-0000-0000-0000-5b28979e0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // "exampleTag": to.Ptr("exampleValue"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-19T05:41:50.0041314Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // RepoConfiguration: &armdatafactory.FactoryVSTSConfiguration{ - // Type: to.Ptr("FactoryVSTSConfiguration"), - // AccountName: to.Ptr("ADF"), - // CollaborationBranch: to.Ptr("master"), - // LastCommitID: to.Ptr(""), - // RepositoryName: to.Ptr("repo"), - // RootFolder: to.Ptr("/"), - // ProjectName: to.Ptr("project"), - // TenantID: to.Ptr(""), - // }, - // Version: to.Ptr("2018-06-01"), - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_CreateOrUpdate.json -func ExampleFactoriesClient_CreateOrUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewFactoriesClient().CreateOrUpdate(ctx, "exampleResourceGroup", 
"exampleFactoryName", armdatafactory.Factory{ - Location: to.Ptr("East US"), - }, &armdatafactory.FactoriesClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.Factory = armdatafactory.Factory{ - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00003e04-0000-0000-0000-5b28979e0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-19T05:41:50.0041314Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2018-06-01"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Update.json -func ExampleFactoriesClient_Update() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewFactoriesClient().Update(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.FactoryUpdateParameters{ - Tags: map[string]*string{ - "exampleTag": to.Ptr("exampleValue"), - }, - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.Factory = armdatafactory.Factory{ - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00003f04-0000-0000-0000-5b28979e0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // "exampleTag": to.Ptr("exampleValue"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-19T05:41:50.0041314Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // Version: to.Ptr("2018-06-01"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Get.json -func ExampleFactoriesClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewFactoriesClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", &armdatafactory.FactoriesClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.Factory = armdatafactory.Factory{ - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories"), - // ETag: to.Ptr("\"00004004-0000-0000-0000-5b28979e0000\""), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Location: to.Ptr("East US"), - // Tags: map[string]*string{ - // "exampleTag": to.Ptr("exampleValue"), - // }, - // Properties: &armdatafactory.FactoryProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-19T05:41:50.0041314Z"); return t}()), - // ProvisioningState: to.Ptr("Succeeded"), - // PurviewConfiguration: &armdatafactory.PurviewConfiguration{ - // PurviewResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Purview/accounts/examplePurview"), - // }, - // RepoConfiguration: &armdatafactory.FactoryVSTSConfiguration{ - // Type: to.Ptr("FactoryVSTSConfiguration"), - // AccountName: to.Ptr("ADF"), - // CollaborationBranch: to.Ptr("master"), - // LastCommitID: to.Ptr(""), - // RepositoryName: to.Ptr("repo"), - // RootFolder: to.Ptr("/"), - // ProjectName: to.Ptr("project"), - // TenantID: to.Ptr(""), - // }, - // Version: to.Ptr("2018-06-01"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_Delete.json -func ExampleFactoriesClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewFactoriesClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_GetGitHubAccessToken.json -func ExampleFactoriesClient_GetGitHubAccessToken() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewFactoriesClient().GetGitHubAccessToken(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.GitHubAccessTokenRequest{ - GitHubAccessCode: to.Ptr("some"), - GitHubAccessTokenBaseURL: to.Ptr("some"), - GitHubClientID: to.Ptr("some"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.GitHubAccessTokenResponse = armdatafactory.GitHubAccessTokenResponse{ - // GitHubAccessToken: to.Ptr("myAccessTokenExample"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Factories_GetDataPlaneAccess.json -func ExampleFactoriesClient_GetDataPlaneAccess() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewFactoriesClient().GetDataPlaneAccess(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.UserAccessPolicy{ - AccessResourcePath: to.Ptr(""), - ExpireTime: to.Ptr("2018-11-10T09:46:20.2659347Z"), - Permissions: to.Ptr("r"), - ProfileName: to.Ptr("DefaultProfile"), - StartTime: to.Ptr("2018-11-10T02:46:20.2659347Z"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.AccessPolicyResponse = armdatafactory.AccessPolicyResponse{ - // AccessToken: to.Ptr("**********"), - // DataPlaneURL: to.Ptr("https://rpeastus.svc.datafactory.azure.com:4433"), - // Policy: &armdatafactory.UserAccessPolicy{ - // AccessResourcePath: to.Ptr(""), - // ExpireTime: to.Ptr("2018-11-10T09:46:20.2659347Z"), - // Permissions: to.Ptr("r"), - // ProfileName: to.Ptr("DefaultProfile"), - // StartTime: to.Ptr("2018-11-10T02:46:20.2659347Z"), - // }, - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/globalparameters_client.go b/sdk/resourcemanager/datafactory/armdatafactory/globalparameters_client.go index ce1526cf1791..757174bea875 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/globalparameters_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/globalparameters_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -55,18 +54,21 @@ func NewGlobalParametersClient(subscriptionID string, credential azcore.TokenCre // - options - GlobalParametersClientCreateOrUpdateOptions contains the optional parameters for the GlobalParametersClient.CreateOrUpdate // method. 
func (client *GlobalParametersClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, globalParameterName string, defaultParam GlobalParameterResource, options *GlobalParametersClientCreateOrUpdateOptions) (GlobalParametersClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, globalParameterName, defaultParam, options) if err != nil { return GlobalParametersClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return GlobalParametersClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return GlobalParametersClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return GlobalParametersClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -96,7 +98,10 @@ func (client *GlobalParametersClient) createOrUpdateCreateRequest(ctx context.Co reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, defaultParam) + if err := runtime.MarshalAsJSON(req, defaultParam); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -117,16 +122,18 @@ func (client *GlobalParametersClient) createOrUpdateHandleResponse(resp *http.Re // - globalParameterName - The global parameter name. // - options - GlobalParametersClientDeleteOptions contains the optional parameters for the GlobalParametersClient.Delete method. func (client *GlobalParametersClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, globalParameterName string, options *GlobalParametersClientDeleteOptions) (GlobalParametersClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, globalParameterName, options) if err != nil { return GlobalParametersClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return GlobalParametersClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return GlobalParametersClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return GlobalParametersClientDeleteResponse{}, err } return GlobalParametersClientDeleteResponse{}, nil } @@ -170,18 +177,21 @@ func (client *GlobalParametersClient) deleteCreateRequest(ctx context.Context, r // - globalParameterName - The global parameter name. // - options - GlobalParametersClientGetOptions contains the optional parameters for the GlobalParametersClient.Get method. 
func (client *GlobalParametersClient) Get(ctx context.Context, resourceGroupName string, factoryName string, globalParameterName string, options *GlobalParametersClientGetOptions) (GlobalParametersClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, globalParameterName, options) if err != nil { return GlobalParametersClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return GlobalParametersClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return GlobalParametersClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return GlobalParametersClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/globalparameters_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/globalparameters_client_example_test.go deleted file mode 100644 index f13d4e5a6689..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/globalparameters_client_example_test.go +++ /dev/null @@ -1,209 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_ListByFactory.json -func ExampleGlobalParametersClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewGlobalParametersClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // page.GlobalParameterListResponse = armdatafactory.GlobalParameterListResponse{ - // Value: []*armdatafactory.GlobalParameterResource{ - // { - // Name: to.Ptr("default"), - // Type: to.Ptr("Microsoft.DataFactory/factories/globalParameters"), - // Etag: to.Ptr("da00a1c3-0000-0400-0000-6241f3290000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/globalParameters/default"), - // Properties: map[string]*armdatafactory.GlobalParameterSpecification{ - // "copyPipelineTest": &armdatafactory.GlobalParameterSpecification{ - // Type: to.Ptr(armdatafactory.GlobalParameterTypeObject), - // Value: map[string]any{ - // "mySinkDatasetFolderPath": "exampleOutput", - // "mySourceDatasetFolderPath": "exampleInput/", - // "testingEmptyContextParams": "", - // }, - // }, - // "mySourceDatasetFolderPath": &armdatafactory.GlobalParameterSpecification{ - // Type: to.Ptr(armdatafactory.GlobalParameterTypeString), - // Value: "input", - // }, - // "url": &armdatafactory.GlobalParameterSpecification{ - // Type: to.Ptr(armdatafactory.GlobalParameterTypeString), - // Value: "https://testuri.test", - // }, - // "validADFOffice365Uris": &armdatafactory.GlobalParameterSpecification{ - // Type: to.Ptr(armdatafactory.GlobalParameterTypeArray), - // Value: []any{ - // "https://testuri.test", - // "https://testuri.test", - // }, - // }, - // "waitTime": &armdatafactory.GlobalParameterSpecification{ - // Type: to.Ptr(armdatafactory.GlobalParameterTypeInt), - // Value: float64(5), - // }, - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Get.json -func ExampleGlobalParametersClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewGlobalParametersClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "default", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.GlobalParameterResource = armdatafactory.GlobalParameterResource{ - // Name: to.Ptr("default"), - // Type: to.Ptr("Microsoft.DataFactory/factories/globalParameters"), - // Etag: to.Ptr("72001a6a-0000-0400-0000-623d058f0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/globalParameters/default"), - // Properties: map[string]*armdatafactory.GlobalParameterSpecification{ - // "waitTime": &armdatafactory.GlobalParameterSpecification{ - // Type: to.Ptr(armdatafactory.GlobalParameterTypeInt), - // Value: float64(10), - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Create.json -func ExampleGlobalParametersClient_CreateOrUpdate_globalParametersCreate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewGlobalParametersClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "default", armdatafactory.GlobalParameterResource{ - Properties: map[string]*armdatafactory.GlobalParameterSpecification{ - "waitTime": { - Type: to.Ptr(armdatafactory.GlobalParameterTypeInt), - Value: float64(5), - }, - }, - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.GlobalParameterResource = armdatafactory.GlobalParameterResource{ - // Name: to.Ptr("default"), - // Type: to.Ptr("Microsoft.DataFactory/factories/globalParameters"), - // Etag: to.Ptr("0a008ad4-0000-0000-0000-5b245c6e0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/globalParameters/default"), - // Properties: map[string]*armdatafactory.GlobalParameterSpecification{ - // "waitTime": &armdatafactory.GlobalParameterSpecification{ - // Type: to.Ptr(armdatafactory.GlobalParameterTypeInt), - // Value: float64(5), - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Update.json -func ExampleGlobalParametersClient_CreateOrUpdate_globalParametersUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewGlobalParametersClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "default", armdatafactory.GlobalParameterResource{ - Properties: map[string]*armdatafactory.GlobalParameterSpecification{ - "waitTime": { - Type: to.Ptr(armdatafactory.GlobalParameterTypeInt), - Value: float64(5), - }, - }, - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.GlobalParameterResource = armdatafactory.GlobalParameterResource{ - // Name: to.Ptr("default"), - // Type: to.Ptr("Microsoft.DataFactory/factories/globalParameters"), - // Etag: to.Ptr("0a008ad4-0000-0000-0000-5b245c6e0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/globalParameters/default"), - // Properties: map[string]*armdatafactory.GlobalParameterSpecification{ - // "waitTime": &armdatafactory.GlobalParameterSpecification{ - // Type: to.Ptr(armdatafactory.GlobalParameterTypeInt), - // Value: float64(5), - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GlobalParameters_Delete.json -func ExampleGlobalParametersClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewGlobalParametersClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "default", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/go.mod b/sdk/resourcemanager/datafactory/armdatafactory/go.mod index 20eae3ddc5e0..ee2d4fd315db 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/go.mod +++ b/sdk/resourcemanager/datafactory/armdatafactory/go.mod @@ -1,17 +1,17 @@ -module github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3 +module github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v4 go 1.18 require ( - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.4.0 - github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.2 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.1 + github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal v1.1.2 + github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0 github.com/stretchr/testify v1.7.0 ) require ( - github.com/Azure/azure-sdk-for-go/sdk/internal v1.2.0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0 // indirect + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.2 // indirect github.com/AzureAD/microsoft-authentication-library-for-go v0.9.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dnaeon/go-vcr v1.1.0 // indirect @@ -21,9 +21,9 @@ require ( github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect golang.org/x/crypto v0.6.0 // indirect - golang.org/x/net v0.7.0 // indirect - golang.org/x/sys v0.5.0 // indirect - golang.org/x/text v0.7.0 // indirect + golang.org/x/net v0.8.0 // indirect + golang.org/x/sys v0.6.0 // indirect + golang.org/x/text v0.8.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) diff --git a/sdk/resourcemanager/datafactory/armdatafactory/go.sum b/sdk/resourcemanager/datafactory/armdatafactory/go.sum index 6e2c77825a2c..93fdfc2850cd 100644 --- 
a/sdk/resourcemanager/datafactory/armdatafactory/go.sum +++ b/sdk/resourcemanager/datafactory/armdatafactory/go.sum @@ -1,9 +1,9 @@ -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.4.0 h1:rTnT/Jrcm+figWlYz4Ixzt0SJVR2cMC8lvZcimipiEY= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.4.0/go.mod h1:ON4tFdPTwRcgWEaVDrN3584Ef+b7GgSJaXxe5fW9t4M= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.1 h1:SEy2xmstIphdPwNBUi7uhvjyjhVKISfwjfOJmuy7kg4= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.1/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.2 h1:uqM+VoHjVH6zdlkLF2b6O0ZANcHoj3rO0PoQ3jglUJA= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.2/go.mod h1:twTKAa1E6hLmSDjLhaCkbTMQKc7p/rNLU40rLxGEOCI= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.2.0 h1:leh5DwKv6Ihwi+h60uHtn6UWAxBbZ0q8DwQVMzf61zw= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.2.0/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 h1:sXr+ck84g/ZlZUOZiNELInmMgOsuGwdjjVkEIde0OtY= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal v1.1.2 h1:mLY+pNLjCUeKhgnAJWAKhEUQM+RJQo2H1fuGSw1Ky1E= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal v1.1.2/go.mod h1:FbdwsQ2EzwvXxOPcMFYO8ogEc9uMMIj3YkmCdXdAFmk= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0 h1:ECsQtyERDVz3NP3kvDOTLvbQhqWp/x9EsGKtb4ogUr8= @@ -31,13 +31,13 @@ github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= golang.org/x/crypto v0.6.0 h1:qfktjS5LUO+fFKeJXZ+ikTRijMmljikvG68fpMMruSc= golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimenodes_client.go b/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimenodes_client.go index f23ac620d986..23faacf5dd58 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimenodes_client.go +++ 
b/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimenodes_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -55,16 +54,18 @@ func NewIntegrationRuntimeNodesClient(subscriptionID string, credential azcore.T // - options - IntegrationRuntimeNodesClientDeleteOptions contains the optional parameters for the IntegrationRuntimeNodesClient.Delete // method. func (client *IntegrationRuntimeNodesClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, nodeName string, options *IntegrationRuntimeNodesClientDeleteOptions) (IntegrationRuntimeNodesClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, nodeName, options) if err != nil { return IntegrationRuntimeNodesClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimeNodesClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return IntegrationRuntimeNodesClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimeNodesClientDeleteResponse{}, err } return IntegrationRuntimeNodesClientDeleteResponse{}, nil } @@ -114,18 +115,21 @@ func (client *IntegrationRuntimeNodesClient) deleteCreateRequest(ctx context.Con // - options - IntegrationRuntimeNodesClientGetOptions contains the optional parameters for the IntegrationRuntimeNodesClient.Get // method. func (client *IntegrationRuntimeNodesClient) Get(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, nodeName string, options *IntegrationRuntimeNodesClientGetOptions) (IntegrationRuntimeNodesClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, nodeName, options) if err != nil { return IntegrationRuntimeNodesClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimeNodesClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimeNodesClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimeNodesClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. @@ -182,18 +186,21 @@ func (client *IntegrationRuntimeNodesClient) getHandleResponse(resp *http.Respon // - options - IntegrationRuntimeNodesClientGetIPAddressOptions contains the optional parameters for the IntegrationRuntimeNodesClient.GetIPAddress // method. 
func (client *IntegrationRuntimeNodesClient) GetIPAddress(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, nodeName string, options *IntegrationRuntimeNodesClientGetIPAddressOptions) (IntegrationRuntimeNodesClientGetIPAddressResponse, error) { + var err error req, err := client.getIPAddressCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, nodeName, options) if err != nil { return IntegrationRuntimeNodesClientGetIPAddressResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimeNodesClientGetIPAddressResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimeNodesClientGetIPAddressResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimeNodesClientGetIPAddressResponse{}, err } - return client.getIPAddressHandleResponse(resp) + resp, err := client.getIPAddressHandleResponse(httpResp) + return resp, err } // getIPAddressCreateRequest creates the GetIPAddress request. @@ -251,18 +258,21 @@ func (client *IntegrationRuntimeNodesClient) getIPAddressHandleResponse(resp *ht // - options - IntegrationRuntimeNodesClientUpdateOptions contains the optional parameters for the IntegrationRuntimeNodesClient.Update // method. func (client *IntegrationRuntimeNodesClient) Update(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, nodeName string, updateIntegrationRuntimeNodeRequest UpdateIntegrationRuntimeNodeRequest, options *IntegrationRuntimeNodesClientUpdateOptions) (IntegrationRuntimeNodesClientUpdateResponse, error) { + var err error req, err := client.updateCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, nodeName, updateIntegrationRuntimeNodeRequest, options) if err != nil { return IntegrationRuntimeNodesClientUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimeNodesClientUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimeNodesClientUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimeNodesClientUpdateResponse{}, err } - return client.updateHandleResponse(resp) + resp, err := client.updateHandleResponse(httpResp) + return resp, err } // updateCreateRequest creates the Update request. @@ -296,7 +306,10 @@ func (client *IntegrationRuntimeNodesClient) updateCreateRequest(ctx context.Con reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, updateIntegrationRuntimeNodeRequest) + if err := runtime.MarshalAsJSON(req, updateIntegrationRuntimeNodeRequest); err != nil { + return nil, err + } + return req, nil } // updateHandleResponse handles the Update response. 
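[Editor's note, not part of the patch] go.mod above bumps the module path to .../armdatafactory/v4, and the regenerated client methods in this patch restructure their error handling around a named err assigned from runtime.NewResponseError. Callers still receive an *azcore.ResponseError for non-2xx statuses. The sketch below shows one way a consumer might inspect that error against the v4 module path; it is illustrative only, and the subscription, resource group, factory, runtime, and node names are placeholders.

package main

import (
	"context"
	"errors"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v4"
)

func main() {
	cred, err := azidentity.NewDefaultAzureCredential(nil)
	if err != nil {
		log.Fatalf("failed to obtain a credential: %v", err)
	}
	// Placeholder subscription ID; the constructor signature matches the generated client above.
	client, err := armdatafactory.NewIntegrationRuntimeNodesClient("<subscription-id>", cred, nil)
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}
	_, err = client.Get(context.Background(), "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", nil)
	if err != nil {
		// Non-success status codes surface as *azcore.ResponseError.
		var respErr *azcore.ResponseError
		if errors.As(err, &respErr) {
			log.Fatalf("service returned HTTP %d (error code %q)", respErr.StatusCode, respErr.ErrorCode)
		}
		log.Fatalf("request failed: %v", err)
	}
}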
diff --git a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimenodes_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimenodes_client_example_test.go deleted file mode 100644 index 31af30959a71..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimenodes_client_example_test.go +++ /dev/null @@ -1,144 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Get.json -func ExampleIntegrationRuntimeNodesClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimeNodesClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.SelfHostedIntegrationRuntimeNode = armdatafactory.SelfHostedIntegrationRuntimeNode{ - // Capabilities: map[string]*string{ - // "connectedToResourceManager": to.Ptr("True"), - // "credentialInSync": to.Ptr("True"), - // "httpsPortEnabled": to.Ptr("True"), - // "nodeEnabled": to.Ptr("True"), - // "serviceBusConnected": to.Ptr("True"), - // }, - // HostServiceURI: to.Ptr("https://yanzhang-dt.fareast.corp.microsoft.com:8050/HostServiceRemote.svc/"), - // IsActiveDispatcher: to.Ptr(true), - // LastConnectTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-17T06:30:46.6262976Z"); return t}()), - // LastStartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-17T03:45:30.8499851Z"); return t}()), - // LastUpdateResult: to.Ptr(armdatafactory.IntegrationRuntimeUpdateResultNone), - // MachineName: to.Ptr("YANZHANG-DT"), - // MaxConcurrentJobs: to.Ptr[int32](20), - // NodeName: to.Ptr("Node_1"), - // RegisterTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-17T03:44:55.8012825Z"); return t}()), - // Status: to.Ptr(armdatafactory.SelfHostedIntegrationRuntimeNodeStatusOnline), - // Version: to.Ptr("3.8.6743.6"), - // VersionStatus: to.Ptr("UpToDate"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Delete.json -func ExampleIntegrationRuntimeNodesClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewIntegrationRuntimeNodesClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_Update.json -func ExampleIntegrationRuntimeNodesClient_Update() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimeNodesClient().Update(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", armdatafactory.UpdateIntegrationRuntimeNodeRequest{ - ConcurrentJobsLimit: to.Ptr[int32](2), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.SelfHostedIntegrationRuntimeNode = armdatafactory.SelfHostedIntegrationRuntimeNode{ - // Capabilities: map[string]*string{ - // "connectedToResourceManager": to.Ptr("True"), - // "credentialInSync": to.Ptr("True"), - // "httpsPortEnabled": to.Ptr("True"), - // "nodeEnabled": to.Ptr("True"), - // "serviceBusConnected": to.Ptr("True"), - // }, - // ConcurrentJobsLimit: to.Ptr[int32](2), - // HostServiceURI: to.Ptr("https://yanzhang-dt.fareast.corp.microsoft.com:8050/HostServiceRemote.svc/"), - // IsActiveDispatcher: to.Ptr(true), - // LastConnectTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-14T15:00:35.7544322Z"); return t}()), - // LastStartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-14T14:52:59.8933313Z"); return t}()), - // LastUpdateResult: to.Ptr(armdatafactory.IntegrationRuntimeUpdateResultNone), - // MachineName: to.Ptr("YANZHANG-DT"), - // MaxConcurrentJobs: to.Ptr[int32](56), - // NodeName: to.Ptr("Node_1"), - // RegisterTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-14T14:51:44.9237069Z"); return t}()), - // Status: to.Ptr(armdatafactory.SelfHostedIntegrationRuntimeNodeStatusOnline), - // Version: to.Ptr("3.8.6730.2"), - // VersionStatus: to.Ptr("UpToDate"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeNodes_GetIpAddress.json -func ExampleIntegrationRuntimeNodesClient_GetIPAddress() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimeNodesClient().GetIPAddress(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", "Node_1", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.IntegrationRuntimeNodeIPAddress = armdatafactory.IntegrationRuntimeNodeIPAddress{ - // IPAddress: to.Ptr("**********"), - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimeobjectmetadata_client.go b/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimeobjectmetadata_client.go index c2679e924d49..20a0dcd4011a 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimeobjectmetadata_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimeobjectmetadata_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
package armdatafactory @@ -54,18 +53,21 @@ func NewIntegrationRuntimeObjectMetadataClient(subscriptionID string, credential // - options - IntegrationRuntimeObjectMetadataClientGetOptions contains the optional parameters for the IntegrationRuntimeObjectMetadataClient.Get // method. func (client *IntegrationRuntimeObjectMetadataClient) Get(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimeObjectMetadataClientGetOptions) (IntegrationRuntimeObjectMetadataClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimeObjectMetadataClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimeObjectMetadataClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimeObjectMetadataClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimeObjectMetadataClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. @@ -96,7 +98,10 @@ func (client *IntegrationRuntimeObjectMetadataClient) getCreateRequest(ctx conte req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} if options != nil && options.GetMetadataRequest != nil { - return req, runtime.MarshalAsJSON(req, *options.GetMetadataRequest) + if err := runtime.MarshalAsJSON(req, *options.GetMetadataRequest); err != nil { + return nil, err + } + return req, nil } return req, nil } @@ -125,7 +130,8 @@ func (client *IntegrationRuntimeObjectMetadataClient) BeginRefresh(ctx context.C if err != nil { return nil, err } - return runtime.NewPoller[IntegrationRuntimeObjectMetadataClientRefreshResponse](resp, client.internal.Pipeline(), nil) + poller, err := runtime.NewPoller[IntegrationRuntimeObjectMetadataClientRefreshResponse](resp, client.internal.Pipeline(), nil) + return poller, err } else { return runtime.NewPollerFromResumeToken[IntegrationRuntimeObjectMetadataClientRefreshResponse](options.ResumeToken, client.internal.Pipeline(), nil) } @@ -136,18 +142,20 @@ func (client *IntegrationRuntimeObjectMetadataClient) BeginRefresh(ctx context.C // // Generated from API version 2018-06-01 func (client *IntegrationRuntimeObjectMetadataClient) refresh(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimeObjectMetadataClientBeginRefreshOptions) (*http.Response, error) { + var err error req, err := client.refreshCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return nil, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return nil, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusAccepted) { - return nil, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusAccepted) { + err = runtime.NewResponseError(httpResp) + return nil, err } - return resp, nil + return httpResp, nil } // refreshCreateRequest creates the Refresh request. 
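[Editor's note, not part of the patch] BeginRefresh above now builds its poller with an explicit poller/err pair, and it continues to accept a resume token in IntegrationRuntimeObjectMetadataClientBeginRefreshOptions via runtime.NewPollerFromResumeToken. A minimal sketch of suspending and resuming that long-running operation follows; it assumes ctx (context.Context) and cred (azcore.TokenCredential) already exist inside a function with the usual imports, uses placeholder resource names, and is not part of the generated code.

// Assumes ctx and cred are already set up; all resource names are placeholders.
client, err := armdatafactory.NewIntegrationRuntimeObjectMetadataClient("<subscription-id>", cred, nil)
if err != nil {
	log.Fatalf("failed to create client: %v", err)
}
poller, err := client.BeginRefresh(ctx, "exampleResourceGroup", "exampleFactoryName", "testactivityv2", nil)
if err != nil {
	log.Fatalf("failed to start refresh: %v", err)
}
// Persist the resume token (for example to durable storage) so the operation can be resumed later.
token, err := poller.ResumeToken()
if err != nil {
	log.Fatalf("failed to get resume token: %v", err)
}
resumed, err := client.BeginRefresh(ctx, "exampleResourceGroup", "exampleFactoryName", "testactivityv2",
	&armdatafactory.IntegrationRuntimeObjectMetadataClientBeginRefreshOptions{ResumeToken: token})
if err != nil {
	log.Fatalf("failed to resume refresh: %v", err)
}
res, err := resumed.PollUntilDone(ctx, nil)
if err != nil {
	log.Fatalf("failed to poll the result: %v", err)
}
_ = res // res.SsisObjectMetadataStatusResponse reports the refresh status.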
diff --git a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimeobjectmetadata_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimeobjectmetadata_client_example_test.go deleted file mode 100644 index 058c62d25037..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimeobjectmetadata_client_example_test.go +++ /dev/null @@ -1,577 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeObjectMetadata_Refresh.json -func ExampleIntegrationRuntimeObjectMetadataClient_BeginRefresh() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - poller, err := clientFactory.NewIntegrationRuntimeObjectMetadataClient().BeginRefresh(ctx, "exampleResourceGroup", "exampleFactoryName", "testactivityv2", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - res, err := poller.PollUntilDone(ctx, nil) - if err != nil { - log.Fatalf("failed to pull the result: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.SsisObjectMetadataStatusResponse = armdatafactory.SsisObjectMetadataStatusResponse{ - // Name: to.Ptr("ca63c855b72d44959653ffcc6eb0b96c"), - // Status: to.Ptr("Succeeded"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimeObjectMetadata_Get.json -func ExampleIntegrationRuntimeObjectMetadataClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimeObjectMetadataClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "testactivityv2", &armdatafactory.IntegrationRuntimeObjectMetadataClientGetOptions{GetMetadataRequest: &armdatafactory.GetSsisObjectMetadataRequest{ - MetadataPath: to.Ptr("ssisFolders"), - }, - }) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.SsisObjectMetadataListResponse = armdatafactory.SsisObjectMetadataListResponse{ - // Value: []armdatafactory.SsisObjectMetadataClassification{ - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("TestFolder"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // Description: to.Ptr(""), - // ID: to.Ptr[int64](1), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("EnvironmentFolder"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // Description: to.Ptr(""), - // ID: to.Ptr[int64](2), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("ActivityTest"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // Description: to.Ptr(""), - // ID: to.Ptr[int64](3), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("这是文件夹"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // Description: to.Ptr(""), - // ID: to.Ptr[int64](4), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("1"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](5), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("2"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](6), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("3"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](7), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("4"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](8), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("5"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](9), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("6"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](10), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("7"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](11), 
- // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("8"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](12), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("9"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](13), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("10"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](14), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("11"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](15), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("12"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](16), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("13"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](17), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("14"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](18), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("15"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](19), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("16"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](20), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("17"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](21), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("18"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](22), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("19"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](23), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("20"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](24), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("21"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](25), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("22"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](26), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("23"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](27), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("24"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](28), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("25"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](29), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("26"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](30), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("27"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](31), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("28"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](32), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("29"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](33), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("30"), - // Type: 
to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](34), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("31"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](35), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("32"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](36), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("33"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](37), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("34"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](38), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("35"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](39), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("36"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](40), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("37"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](41), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("38"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](42), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("39"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](43), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("40"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](44), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("41"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](45), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("42"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](46), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("43"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](47), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("44"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](48), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("45"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](49), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("46"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](50), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("47"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](51), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("48"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](52), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("49"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](53), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("50"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](54), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("51"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](55), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("52"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: 
to.Ptr[int64](56), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("53"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](57), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("54"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](58), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("55"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](59), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("56"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](60), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("57"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](61), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("58"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](62), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("59"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](63), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("60"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](64), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("61"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](65), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("62"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](66), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("63"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](67), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("64"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](68), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("65"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](69), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("66"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](70), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("67"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](71), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("68"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](72), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("69"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](73), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("70"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](74), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("71"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](75), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("72"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](76), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("73"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](77), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("74"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](78), - // }, - // &armdatafactory.SsisFolder{ - // Name: 
to.Ptr("75"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](79), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("76"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](80), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("77"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](81), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("78"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](82), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("79"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](83), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("80"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](84), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("81"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](85), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("82"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](86), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("83"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](87), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("84"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](88), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("85"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](89), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("86"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](90), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("87"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](91), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("88"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](92), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("89"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](93), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("90"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](94), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("91"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](95), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("92"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](96), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("93"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](97), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("94"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](98), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("95"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](99), - // }, - // &armdatafactory.SsisFolder{ - // Name: to.Ptr("96"), - // Type: to.Ptr(armdatafactory.SsisObjectMetadataTypeFolder), - // ID: to.Ptr[int64](100), - // }}, - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimes_client.go 
b/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimes_client.go index 7e7761e9c7e3..41be57ceafb0 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimes_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimes_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -55,18 +54,21 @@ func NewIntegrationRuntimesClient(subscriptionID string, credential azcore.Token // - options - IntegrationRuntimesClientCreateLinkedIntegrationRuntimeOptions contains the optional parameters for the IntegrationRuntimesClient.CreateLinkedIntegrationRuntime // method. func (client *IntegrationRuntimesClient) CreateLinkedIntegrationRuntime(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, createLinkedIntegrationRuntimeRequest CreateLinkedIntegrationRuntimeRequest, options *IntegrationRuntimesClientCreateLinkedIntegrationRuntimeOptions) (IntegrationRuntimesClientCreateLinkedIntegrationRuntimeResponse, error) { + var err error req, err := client.createLinkedIntegrationRuntimeCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, createLinkedIntegrationRuntimeRequest, options) if err != nil { return IntegrationRuntimesClientCreateLinkedIntegrationRuntimeResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientCreateLinkedIntegrationRuntimeResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientCreateLinkedIntegrationRuntimeResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientCreateLinkedIntegrationRuntimeResponse{}, err } - return client.createLinkedIntegrationRuntimeHandleResponse(resp) + resp, err := client.createLinkedIntegrationRuntimeHandleResponse(httpResp) + return resp, err } // createLinkedIntegrationRuntimeCreateRequest creates the CreateLinkedIntegrationRuntime request. @@ -96,7 +98,10 @@ func (client *IntegrationRuntimesClient) createLinkedIntegrationRuntimeCreateReq reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, createLinkedIntegrationRuntimeRequest) + if err := runtime.MarshalAsJSON(req, createLinkedIntegrationRuntimeRequest); err != nil { + return nil, err + } + return req, nil } // createLinkedIntegrationRuntimeHandleResponse handles the CreateLinkedIntegrationRuntime response. @@ -119,18 +124,21 @@ func (client *IntegrationRuntimesClient) createLinkedIntegrationRuntimeHandleRes // - options - IntegrationRuntimesClientCreateOrUpdateOptions contains the optional parameters for the IntegrationRuntimesClient.CreateOrUpdate // method. 
func (client *IntegrationRuntimesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, integrationRuntime IntegrationRuntimeResource, options *IntegrationRuntimesClientCreateOrUpdateOptions) (IntegrationRuntimesClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, integrationRuntime, options) if err != nil { return IntegrationRuntimesClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -163,7 +171,10 @@ func (client *IntegrationRuntimesClient) createOrUpdateCreateRequest(ctx context req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, integrationRuntime) + if err := runtime.MarshalAsJSON(req, integrationRuntime); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -185,16 +196,18 @@ func (client *IntegrationRuntimesClient) createOrUpdateHandleResponse(resp *http // - options - IntegrationRuntimesClientDeleteOptions contains the optional parameters for the IntegrationRuntimesClient.Delete // method. func (client *IntegrationRuntimesClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientDeleteOptions) (IntegrationRuntimesClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimesClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return IntegrationRuntimesClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientDeleteResponse{}, err } return IntegrationRuntimesClientDeleteResponse{}, nil } @@ -238,18 +251,21 @@ func (client *IntegrationRuntimesClient) deleteCreateRequest(ctx context.Context // - integrationRuntimeName - The integration runtime name. // - options - IntegrationRuntimesClientGetOptions contains the optional parameters for the IntegrationRuntimesClient.Get method. 
func (client *IntegrationRuntimesClient) Get(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientGetOptions) (IntegrationRuntimesClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimesClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNotModified) { - return IntegrationRuntimesClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNotModified) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. @@ -305,18 +321,21 @@ func (client *IntegrationRuntimesClient) getHandleResponse(resp *http.Response) // - options - IntegrationRuntimesClientGetConnectionInfoOptions contains the optional parameters for the IntegrationRuntimesClient.GetConnectionInfo // method. func (client *IntegrationRuntimesClient) GetConnectionInfo(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientGetConnectionInfoOptions) (IntegrationRuntimesClientGetConnectionInfoResponse, error) { + var err error req, err := client.getConnectionInfoCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimesClientGetConnectionInfoResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientGetConnectionInfoResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientGetConnectionInfoResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientGetConnectionInfoResponse{}, err } - return client.getConnectionInfoHandleResponse(resp) + resp, err := client.getConnectionInfoHandleResponse(httpResp) + return resp, err } // getConnectionInfoCreateRequest creates the GetConnectionInfo request. @@ -369,18 +388,21 @@ func (client *IntegrationRuntimesClient) getConnectionInfoHandleResponse(resp *h // - options - IntegrationRuntimesClientGetMonitoringDataOptions contains the optional parameters for the IntegrationRuntimesClient.GetMonitoringData // method. 
func (client *IntegrationRuntimesClient) GetMonitoringData(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientGetMonitoringDataOptions) (IntegrationRuntimesClientGetMonitoringDataResponse, error) { + var err error req, err := client.getMonitoringDataCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimesClientGetMonitoringDataResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientGetMonitoringDataResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientGetMonitoringDataResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientGetMonitoringDataResponse{}, err } - return client.getMonitoringDataHandleResponse(resp) + resp, err := client.getMonitoringDataHandleResponse(httpResp) + return resp, err } // getMonitoringDataCreateRequest creates the GetMonitoringData request. @@ -432,18 +454,21 @@ func (client *IntegrationRuntimesClient) getMonitoringDataHandleResponse(resp *h // - options - IntegrationRuntimesClientGetStatusOptions contains the optional parameters for the IntegrationRuntimesClient.GetStatus // method. func (client *IntegrationRuntimesClient) GetStatus(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientGetStatusOptions) (IntegrationRuntimesClientGetStatusResponse, error) { + var err error req, err := client.getStatusCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimesClientGetStatusResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientGetStatusResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientGetStatusResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientGetStatusResponse{}, err } - return client.getStatusHandleResponse(resp) + resp, err := client.getStatusHandleResponse(httpResp) + return resp, err } // getStatusCreateRequest creates the GetStatus request. @@ -495,18 +520,21 @@ func (client *IntegrationRuntimesClient) getStatusHandleResponse(resp *http.Resp // - options - IntegrationRuntimesClientListAuthKeysOptions contains the optional parameters for the IntegrationRuntimesClient.ListAuthKeys // method. 
func (client *IntegrationRuntimesClient) ListAuthKeys(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientListAuthKeysOptions) (IntegrationRuntimesClientListAuthKeysResponse, error) { + var err error req, err := client.listAuthKeysCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimesClientListAuthKeysResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientListAuthKeysResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientListAuthKeysResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientListAuthKeysResponse{}, err } - return client.listAuthKeysHandleResponse(resp) + resp, err := client.listAuthKeysHandleResponse(httpResp) + return resp, err } // listAuthKeysCreateRequest creates the ListAuthKeys request. @@ -629,18 +657,21 @@ func (client *IntegrationRuntimesClient) listByFactoryHandleResponse(resp *http. // - options - IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsOptions contains the optional parameters for // the IntegrationRuntimesClient.ListOutboundNetworkDependenciesEndpoints method. func (client *IntegrationRuntimesClient) ListOutboundNetworkDependenciesEndpoints(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsOptions) (IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsResponse, error) { + var err error req, err := client.listOutboundNetworkDependenciesEndpointsCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsResponse{}, err } - return client.listOutboundNetworkDependenciesEndpointsHandleResponse(resp) + resp, err := client.listOutboundNetworkDependenciesEndpointsHandleResponse(httpResp) + return resp, err } // listOutboundNetworkDependenciesEndpointsCreateRequest creates the ListOutboundNetworkDependenciesEndpoints request. @@ -693,18 +724,21 @@ func (client *IntegrationRuntimesClient) listOutboundNetworkDependenciesEndpoint // - options - IntegrationRuntimesClientRegenerateAuthKeyOptions contains the optional parameters for the IntegrationRuntimesClient.RegenerateAuthKey // method. 
func (client *IntegrationRuntimesClient) RegenerateAuthKey(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, regenerateKeyParameters IntegrationRuntimeRegenerateKeyParameters, options *IntegrationRuntimesClientRegenerateAuthKeyOptions) (IntegrationRuntimesClientRegenerateAuthKeyResponse, error) { + var err error req, err := client.regenerateAuthKeyCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, regenerateKeyParameters, options) if err != nil { return IntegrationRuntimesClientRegenerateAuthKeyResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientRegenerateAuthKeyResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientRegenerateAuthKeyResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientRegenerateAuthKeyResponse{}, err } - return client.regenerateAuthKeyHandleResponse(resp) + resp, err := client.regenerateAuthKeyHandleResponse(httpResp) + return resp, err } // regenerateAuthKeyCreateRequest creates the RegenerateAuthKey request. @@ -734,7 +768,10 @@ func (client *IntegrationRuntimesClient) regenerateAuthKeyCreateRequest(ctx cont reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, regenerateKeyParameters) + if err := runtime.MarshalAsJSON(req, regenerateKeyParameters); err != nil { + return nil, err + } + return req, nil } // regenerateAuthKeyHandleResponse handles the RegenerateAuthKey response. @@ -757,16 +794,18 @@ func (client *IntegrationRuntimesClient) regenerateAuthKeyHandleResponse(resp *h // - options - IntegrationRuntimesClientRemoveLinksOptions contains the optional parameters for the IntegrationRuntimesClient.RemoveLinks // method. 
func (client *IntegrationRuntimesClient) RemoveLinks(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, linkedIntegrationRuntimeRequest LinkedIntegrationRuntimeRequest, options *IntegrationRuntimesClientRemoveLinksOptions) (IntegrationRuntimesClientRemoveLinksResponse, error) { + var err error req, err := client.removeLinksCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, linkedIntegrationRuntimeRequest, options) if err != nil { return IntegrationRuntimesClientRemoveLinksResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientRemoveLinksResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientRemoveLinksResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientRemoveLinksResponse{}, err } return IntegrationRuntimesClientRemoveLinksResponse{}, nil } @@ -798,7 +837,10 @@ func (client *IntegrationRuntimesClient) removeLinksCreateRequest(ctx context.Co reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, linkedIntegrationRuntimeRequest) + if err := runtime.MarshalAsJSON(req, linkedIntegrationRuntimeRequest); err != nil { + return nil, err + } + return req, nil } // BeginStart - Starts a ManagedReserved type integration runtime. @@ -816,7 +858,8 @@ func (client *IntegrationRuntimesClient) BeginStart(ctx context.Context, resourc if err != nil { return nil, err } - return runtime.NewPoller[IntegrationRuntimesClientStartResponse](resp, client.internal.Pipeline(), nil) + poller, err := runtime.NewPoller[IntegrationRuntimesClientStartResponse](resp, client.internal.Pipeline(), nil) + return poller, err } else { return runtime.NewPollerFromResumeToken[IntegrationRuntimesClientStartResponse](options.ResumeToken, client.internal.Pipeline(), nil) } @@ -827,18 +870,20 @@ func (client *IntegrationRuntimesClient) BeginStart(ctx context.Context, resourc // // Generated from API version 2018-06-01 func (client *IntegrationRuntimesClient) start(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientBeginStartOptions) (*http.Response, error) { + var err error req, err := client.startCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return nil, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return nil, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusAccepted) { - return nil, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusAccepted) { + err = runtime.NewResponseError(httpResp) + return nil, err } - return resp, nil + return httpResp, nil } // startCreateRequest creates the Start request. 
@@ -886,7 +931,8 @@ func (client *IntegrationRuntimesClient) BeginStop(ctx context.Context, resource if err != nil { return nil, err } - return runtime.NewPoller[IntegrationRuntimesClientStopResponse](resp, client.internal.Pipeline(), nil) + poller, err := runtime.NewPoller[IntegrationRuntimesClientStopResponse](resp, client.internal.Pipeline(), nil) + return poller, err } else { return runtime.NewPollerFromResumeToken[IntegrationRuntimesClientStopResponse](options.ResumeToken, client.internal.Pipeline(), nil) } @@ -897,18 +943,20 @@ func (client *IntegrationRuntimesClient) BeginStop(ctx context.Context, resource // // Generated from API version 2018-06-01 func (client *IntegrationRuntimesClient) stop(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientBeginStopOptions) (*http.Response, error) { + var err error req, err := client.stopCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return nil, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return nil, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusAccepted) { - return nil, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusAccepted) { + err = runtime.NewResponseError(httpResp) + return nil, err } - return resp, nil + return httpResp, nil } // stopCreateRequest creates the Stop request. @@ -954,16 +1002,18 @@ func (client *IntegrationRuntimesClient) stopCreateRequest(ctx context.Context, // - options - IntegrationRuntimesClientSyncCredentialsOptions contains the optional parameters for the IntegrationRuntimesClient.SyncCredentials // method. func (client *IntegrationRuntimesClient) SyncCredentials(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientSyncCredentialsOptions) (IntegrationRuntimesClientSyncCredentialsResponse, error) { + var err error req, err := client.syncCredentialsCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimesClientSyncCredentialsResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientSyncCredentialsResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientSyncCredentialsResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientSyncCredentialsResponse{}, err } return IntegrationRuntimesClientSyncCredentialsResponse{}, nil } @@ -1009,18 +1059,21 @@ func (client *IntegrationRuntimesClient) syncCredentialsCreateRequest(ctx contex // - options - IntegrationRuntimesClientUpdateOptions contains the optional parameters for the IntegrationRuntimesClient.Update // method. 
func (client *IntegrationRuntimesClient) Update(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, updateIntegrationRuntimeRequest UpdateIntegrationRuntimeRequest, options *IntegrationRuntimesClientUpdateOptions) (IntegrationRuntimesClientUpdateResponse, error) { + var err error req, err := client.updateCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, updateIntegrationRuntimeRequest, options) if err != nil { return IntegrationRuntimesClientUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientUpdateResponse{}, err } - return client.updateHandleResponse(resp) + resp, err := client.updateHandleResponse(httpResp) + return resp, err } // updateCreateRequest creates the Update request. @@ -1050,7 +1103,10 @@ func (client *IntegrationRuntimesClient) updateCreateRequest(ctx context.Context reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, updateIntegrationRuntimeRequest) + if err := runtime.MarshalAsJSON(req, updateIntegrationRuntimeRequest); err != nil { + return nil, err + } + return req, nil } // updateHandleResponse handles the Update response. @@ -1072,16 +1128,18 @@ func (client *IntegrationRuntimesClient) updateHandleResponse(resp *http.Respons // - options - IntegrationRuntimesClientUpgradeOptions contains the optional parameters for the IntegrationRuntimesClient.Upgrade // method. func (client *IntegrationRuntimesClient) Upgrade(ctx context.Context, resourceGroupName string, factoryName string, integrationRuntimeName string, options *IntegrationRuntimesClientUpgradeOptions) (IntegrationRuntimesClientUpgradeResponse, error) { + var err error req, err := client.upgradeCreateRequest(ctx, resourceGroupName, factoryName, integrationRuntimeName, options) if err != nil { return IntegrationRuntimesClientUpgradeResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return IntegrationRuntimesClientUpgradeResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return IntegrationRuntimesClientUpgradeResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return IntegrationRuntimesClientUpgradeResponse{}, err } return IntegrationRuntimesClientUpgradeResponse{}, nil } diff --git a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimes_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimes_client_example_test.go deleted file mode 100644 index 21721da758ea..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/integrationruntimes_client_example_test.go +++ /dev/null @@ -1,638 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. 
-// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_ListByFactory.json -func ExampleIntegrationRuntimesClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewIntegrationRuntimesClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.IntegrationRuntimeListResponse = armdatafactory.IntegrationRuntimeListResponse{ - // Value: []*armdatafactory.IntegrationRuntimeResource{ - // { - // Name: to.Ptr("exampleIntegrationRuntime"), - // Type: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes"), - // Etag: to.Ptr("0400f1a1-0000-0000-0000-5b2188640000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/integrationruntimes/exampleIntegrationRuntime"), - // Properties: &armdatafactory.SelfHostedIntegrationRuntime{ - // Type: to.Ptr(armdatafactory.IntegrationRuntimeTypeSelfHosted), - // Description: to.Ptr("A selfhosted integration runtime"), - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Create.json -func ExampleIntegrationRuntimesClient_CreateOrUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", armdatafactory.IntegrationRuntimeResource{ - Properties: &armdatafactory.SelfHostedIntegrationRuntime{ - Type: to.Ptr(armdatafactory.IntegrationRuntimeTypeSelfHosted), - Description: to.Ptr("A selfhosted integration runtime"), - }, - }, &armdatafactory.IntegrationRuntimesClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response 
here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.IntegrationRuntimeResource = armdatafactory.IntegrationRuntimeResource{ - // Name: to.Ptr("exampleIntegrationRuntime"), - // Type: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes"), - // Etag: to.Ptr("000046c4-0000-0000-0000-5b2198bf0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/integrationruntimes/exampleIntegrationRuntime"), - // Properties: &armdatafactory.SelfHostedIntegrationRuntime{ - // Type: to.Ptr(armdatafactory.IntegrationRuntimeTypeSelfHosted), - // Description: to.Ptr("A selfhosted integration runtime"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Get.json -func ExampleIntegrationRuntimesClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", &armdatafactory.IntegrationRuntimesClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.IntegrationRuntimeResource = armdatafactory.IntegrationRuntimeResource{ - // Name: to.Ptr("exampleIntegrationRuntime"), - // Type: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes"), - // Etag: to.Ptr("15003c4f-0000-0200-0000-5cbe090b0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/integrationruntimes/exampleIntegrationRuntime"), - // Properties: &armdatafactory.SelfHostedIntegrationRuntime{ - // Type: to.Ptr(armdatafactory.IntegrationRuntimeTypeSelfHosted), - // Description: to.Ptr("A selfhosted integration runtime"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Update.json -func ExampleIntegrationRuntimesClient_Update() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().Update(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", armdatafactory.UpdateIntegrationRuntimeRequest{ - AutoUpdate: to.Ptr(armdatafactory.IntegrationRuntimeAutoUpdateOff), - UpdateDelayOffset: to.Ptr("\"PT3H\""), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.IntegrationRuntimeResource = armdatafactory.IntegrationRuntimeResource{ - // Name: to.Ptr("exampleIntegrationRuntime"), - // Type: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes"), - // Etag: to.Ptr("0400f1a1-0000-0000-0000-5b2188640000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/integrationruntimes/exampleIntegrationRuntime"), - // Properties: &armdatafactory.SelfHostedIntegrationRuntime{ - // Type: to.Ptr(armdatafactory.IntegrationRuntimeTypeSelfHosted), - // Description: to.Ptr("A selfhosted integration runtime"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Delete.json -func ExampleIntegrationRuntimesClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewIntegrationRuntimesClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_GetStatus.json -func ExampleIntegrationRuntimesClient_GetStatus() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().GetStatus(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.IntegrationRuntimeStatusResponse = armdatafactory.IntegrationRuntimeStatusResponse{ - // Name: to.Ptr("exampleIntegrationRuntime"), - // Properties: &armdatafactory.SelfHostedIntegrationRuntimeStatus{ - // Type: to.Ptr(armdatafactory.IntegrationRuntimeTypeSelfHosted), - // State: to.Ptr(armdatafactory.IntegrationRuntimeStateOnline), - // TypeProperties: &armdatafactory.SelfHostedIntegrationRuntimeStatusTypeProperties{ - // AutoUpdate: to.Ptr(armdatafactory.IntegrationRuntimeAutoUpdateOff), - // Capabilities: map[string]*string{ - // "connectedToResourceManager": to.Ptr("True"), - // "credentialInSync": to.Ptr("True"), - // "httpsPortEnabled": to.Ptr("True"), - // "nodeEnabled": to.Ptr("True"), - // "serviceBusConnected": to.Ptr("True"), - // }, - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-14T09:17:45.1839685Z"); return t}()), - // LatestVersion: to.Ptr("3.7.6711.1"), - // LocalTimeZoneOffset: to.Ptr("PT8H"), - // Nodes: []*armdatafactory.SelfHostedIntegrationRuntimeNode{ - // { - // Capabilities: map[string]*string{ - // "connectedToResourceManager": to.Ptr("True"), - // "credentialInSync": to.Ptr("True"), - // "httpsPortEnabled": to.Ptr("True"), - // "nodeEnabled": to.Ptr("True"), - // "serviceBusConnected": to.Ptr("True"), - // }, - // HostServiceURI: to.Ptr("https://yanzhang-dt.fareast.corp.microsoft.com:8050/HostServiceRemote.svc/"), - // IsActiveDispatcher: to.Ptr(true), - // LastConnectTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-14T14:52:59.8933313Z"); return t}()), - // LastStartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-14T14:52:59.8933313Z"); return t}()), - // LastUpdateResult: to.Ptr(armdatafactory.IntegrationRuntimeUpdateResultNone), - // MachineName: to.Ptr("YANZHANG-DT"), - // MaxConcurrentJobs: to.Ptr[int32](56), - // NodeName: to.Ptr("Node_1"), - // RegisterTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-14T14:51:44.9237069Z"); return t}()), - // Status: to.Ptr(armdatafactory.SelfHostedIntegrationRuntimeNodeStatusOnline), - // Version: to.Ptr("3.8.6730.2"), - // VersionStatus: to.Ptr("UpToDate"), - // }}, - // ServiceUrls: []*string{ - // to.Ptr("wu.frontend.int.clouddatahub-int.net"), - // to.Ptr("*.servicebus.windows.net")}, - // TaskQueueID: to.Ptr("1a6296ab-423c-4346-9bcc-85a78c2c0582"), - // UpdateDelayOffset: to.Ptr("PT3H"), - // Version: to.Ptr("3.8.6730.2"), - // VersionStatus: to.Ptr("UpToDate"), - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_ListOutboundNetworkDependenciesEndpoints.json -func ExampleIntegrationRuntimesClient_ListOutboundNetworkDependenciesEndpoints() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().ListOutboundNetworkDependenciesEndpoints(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. 
We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse = armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse{ - // Value: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint{ - // { - // Category: to.Ptr("Azure Data Factory (Management)"), - // Endpoints: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpoint{ - // { - // DomainName: to.Ptr("wu.frontend.int.clouddatahub-int.net"), - // EndpointDetails: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails{ - // { - // Port: to.Ptr[int32](443), - // }}, - // }}, - // }, - // { - // Category: to.Ptr("Azure Storage (Management)"), - // Endpoints: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpoint{ - // { - // DomainName: to.Ptr("*.blob.core.windows.net"), - // EndpointDetails: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails{ - // { - // Port: to.Ptr[int32](443), - // }}, - // }, - // { - // DomainName: to.Ptr("*.table.core.windows.net"), - // EndpointDetails: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails{ - // { - // Port: to.Ptr[int32](443), - // }}, - // }}, - // }, - // { - // Category: to.Ptr("Event Hub (Logging)"), - // Endpoints: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpoint{ - // { - // DomainName: to.Ptr("*.servicebus.windows.net"), - // EndpointDetails: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails{ - // { - // Port: to.Ptr[int32](443), - // }}, - // }}, - // }, - // { - // Category: to.Ptr("Microsoft Logging service (Internal Use)"), - // Endpoints: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpoint{ - // { - // DomainName: to.Ptr("gcs.prod.monitoring.core.windows.net"), - // EndpointDetails: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails{ - // { - // Port: to.Ptr[int32](443), - // }}, - // }, - // { - // DomainName: to.Ptr("prod.warmpath.msftcloudes.com"), - // EndpointDetails: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails{ - // { - // Port: to.Ptr[int32](443), - // }}, - // }, - // { - // DomainName: to.Ptr("azurewatsonanalysis-prod.core.windows.net"), - // EndpointDetails: []*armdatafactory.IntegrationRuntimeOutboundNetworkDependenciesEndpointDetails{ - // { - // Port: to.Ptr[int32](443), - // }}, - // }}, - // }}, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_GetConnectionInfo.json -func ExampleIntegrationRuntimesClient_GetConnectionInfo() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().GetConnectionInfo(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", nil) - if err != nil { - 
log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.IntegrationRuntimeConnectionInfo = armdatafactory.IntegrationRuntimeConnectionInfo{ - // HostServiceURI: to.Ptr("https://yanzhang-dt.fareast.corp.microsoft.com:8050/HostServiceRemote.svc/"), - // IdentityCertThumbprint: to.Ptr("**********"), - // IsIdentityCertExprired: to.Ptr(false), - // PublicKey: to.Ptr("**********"), - // ServiceToken: to.Ptr("**********"), - // Version: to.Ptr("3.8.6730.2"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_RegenerateAuthKey.json -func ExampleIntegrationRuntimesClient_RegenerateAuthKey() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().RegenerateAuthKey(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", armdatafactory.IntegrationRuntimeRegenerateKeyParameters{ - KeyName: to.Ptr(armdatafactory.IntegrationRuntimeAuthKeyNameAuthKey2), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.IntegrationRuntimeAuthKeys = armdatafactory.IntegrationRuntimeAuthKeys{ - // AuthKey2: to.Ptr("**********"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_ListAuthKeys.json -func ExampleIntegrationRuntimesClient_ListAuthKeys() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().ListAuthKeys(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.IntegrationRuntimeAuthKeys = armdatafactory.IntegrationRuntimeAuthKeys{ - // AuthKey1: to.Ptr("**********"), - // AuthKey2: to.Ptr("**********"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Start.json -func ExampleIntegrationRuntimesClient_BeginStart() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - poller, err := clientFactory.NewIntegrationRuntimesClient().BeginStart(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - res, err := poller.PollUntilDone(ctx, nil) - if err != nil { - log.Fatalf("failed to pull the result: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.IntegrationRuntimeStatusResponse = armdatafactory.IntegrationRuntimeStatusResponse{ - // Name: to.Ptr("exampleManagedIntegrationRuntime"), - // Properties: &armdatafactory.ManagedIntegrationRuntimeStatus{ - // Type: to.Ptr(armdatafactory.IntegrationRuntimeTypeManaged), - // DataFactoryName: to.Ptr("exampleFactoryName"), - // State: to.Ptr(armdatafactory.IntegrationRuntimeStateStarted), - // TypeProperties: &armdatafactory.ManagedIntegrationRuntimeStatusTypeProperties{ - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-13T21:11:01.8695494Z"); return t}()), - // Nodes: []*armdatafactory.ManagedIntegrationRuntimeNode{ - // }, - // OtherErrors: []*armdatafactory.ManagedIntegrationRuntimeError{ - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Stop.json -func ExampleIntegrationRuntimesClient_BeginStop() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - poller, err := clientFactory.NewIntegrationRuntimesClient().BeginStop(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleManagedIntegrationRuntime", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - _, err = poller.PollUntilDone(ctx, nil) - if err != nil { - log.Fatalf("failed to pull the result: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_SyncCredentials.json -func ExampleIntegrationRuntimesClient_SyncCredentials() { - cred, err := 
azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewIntegrationRuntimesClient().SyncCredentials(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_GetMonitoringData.json -func ExampleIntegrationRuntimesClient_GetMonitoringData() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().GetMonitoringData(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.IntegrationRuntimeMonitoringData = armdatafactory.IntegrationRuntimeMonitoringData{ - // Name: to.Ptr("exampleIntegrationRuntime"), - // Nodes: []*armdatafactory.IntegrationRuntimeNodeMonitoringData{ - // { - // AvailableMemoryInMB: to.Ptr[int32](16740), - // ConcurrentJobsLimit: to.Ptr[int32](28), - // ConcurrentJobsRunning: to.Ptr[int32](0), - // CPUUtilization: to.Ptr[int32](15), - // NodeName: to.Ptr("Node_1"), - // ReceivedBytes: to.Ptr[float32](6.731423377990723), - // SentBytes: to.Ptr[float32](2.647491693496704), - // }}, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_Upgrade.json -func ExampleIntegrationRuntimesClient_Upgrade() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewIntegrationRuntimesClient().Upgrade(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_RemoveLinks.json -func ExampleIntegrationRuntimesClient_RemoveLinks() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } 
- ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewIntegrationRuntimesClient().RemoveLinks(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", armdatafactory.LinkedIntegrationRuntimeRequest{ - LinkedFactoryName: to.Ptr("exampleFactoryName-linked"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/IntegrationRuntimes_CreateLinkedIntegrationRuntime.json -func ExampleIntegrationRuntimesClient_CreateLinkedIntegrationRuntime() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewIntegrationRuntimesClient().CreateLinkedIntegrationRuntime(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime", armdatafactory.CreateLinkedIntegrationRuntimeRequest{ - Name: to.Ptr("bfa92911-9fb6-4fbe-8f23-beae87bc1c83"), - DataFactoryLocation: to.Ptr("West US"), - DataFactoryName: to.Ptr("e9955d6d-56ea-4be3-841c-52a12c1a9981"), - SubscriptionID: to.Ptr("061774c7-4b5a-4159-a55b-365581830283"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.IntegrationRuntimeStatusResponse = armdatafactory.IntegrationRuntimeStatusResponse{ - // Name: to.Ptr("exampleIntegrationRuntime"), - // Properties: &armdatafactory.SelfHostedIntegrationRuntimeStatus{ - // Type: to.Ptr(armdatafactory.IntegrationRuntimeTypeSelfHosted), - // DataFactoryName: to.Ptr("exampleFactoryName"), - // State: to.Ptr(armdatafactory.IntegrationRuntimeStateOnline), - // TypeProperties: &armdatafactory.SelfHostedIntegrationRuntimeStatusTypeProperties{ - // AutoUpdate: to.Ptr(armdatafactory.IntegrationRuntimeAutoUpdateOn), - // AutoUpdateETA: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-20T19:00:00Z"); return t}()), - // Capabilities: map[string]*string{ - // "connectedToResourceManager": to.Ptr("True"), - // "credentialInSync": to.Ptr("True"), - // "httpsPortEnabled": to.Ptr("True"), - // "nodeEnabled": to.Ptr("True"), - // "serviceBusConnected": to.Ptr("True"), - // }, - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-17T03:43:25.7055573Z"); return t}()), - // LatestVersion: to.Ptr("3.9.6774.1"), - // Links: []*armdatafactory.LinkedIntegrationRuntime{ - // { - // Name: to.Ptr("bfa92911-9fb6-4fbe-8f23-beae87bc1c83"), - // CreateTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-17T06:31:04.0617928Z"); return t}()), - // DataFactoryLocation: to.Ptr("West US"), - // DataFactoryName: to.Ptr("e9955d6d-56ea-4be3-841c-52a12c1a9981"), - // SubscriptionID: to.Ptr("061774c7-4b5a-4159-a55b-365581830283"), - // }}, - // LocalTimeZoneOffset: to.Ptr("PT8H"), - // Nodes: []*armdatafactory.SelfHostedIntegrationRuntimeNode{ - // { - // Capabilities: map[string]*string{ - // "connectedToResourceManager": to.Ptr("True"), - // "credentialInSync": to.Ptr("True"), - // "httpsPortEnabled": to.Ptr("True"), - // "nodeEnabled": to.Ptr("True"), - // "serviceBusConnected": to.Ptr("True"), - // }, - // HostServiceURI: to.Ptr("https://yanzhang-dt.fareast.corp.microsoft.com:8050/HostServiceRemote.svc/"), - // IsActiveDispatcher: to.Ptr(true), - // LastConnectTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-17T06:30:46.6262976Z"); return t}()), - // LastStartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-17T03:45:30.8499851Z"); return t}()), - // LastUpdateResult: to.Ptr(armdatafactory.IntegrationRuntimeUpdateResultNone), - // MachineName: to.Ptr("YANZHANG-DT"), - // MaxConcurrentJobs: to.Ptr[int32](20), - // NodeName: to.Ptr("Node_1"), - // RegisterTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-17T03:44:55.8012825Z"); return t}()), - // Status: to.Ptr(armdatafactory.SelfHostedIntegrationRuntimeNodeStatusOnline), - // Version: to.Ptr("3.8.6743.6"), - // VersionStatus: to.Ptr("UpToDate"), - // }}, - // PushedVersion: to.Ptr("3.9.6774.1"), - // ScheduledUpdateDate: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-08-20T00:00:00Z"); return t}()), - // ServiceUrls: []*string{ - // to.Ptr("wu.frontend.int.clouddatahub-int.net"), - // to.Ptr("*.servicebus.windows.net")}, - // TaskQueueID: to.Ptr("823da112-f2d9-426b-a0d8-5f361b94f72a"), - // UpdateDelayOffset: to.Ptr("PT19H"), - // Version: to.Ptr("3.8.6743.6"), - // VersionStatus: to.Ptr("UpdateAvailable"), - // }, - // }, - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/interfaces.go b/sdk/resourcemanager/datafactory/armdatafactory/interfaces.go new file mode 100644 index 000000000000..861a4f19a406 --- /dev/null +++ 
b/sdk/resourcemanager/datafactory/armdatafactory/interfaces.go @@ -0,0 +1,379 @@ +//go:build go1.18 +// +build go1.18 + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +package armdatafactory + +// ActivityClassification provides polymorphic access to related types. +// Call the interface's GetActivity() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *Activity, *AppendVariableActivity, *AzureDataExplorerCommandActivity, *AzureFunctionActivity, *AzureMLBatchExecutionActivity, +// - *AzureMLExecutePipelineActivity, *AzureMLUpdateResourceActivity, *ControlActivity, *CopyActivity, *CustomActivity, *DataLakeAnalyticsUSQLActivity, +// - *DatabricksNotebookActivity, *DatabricksSparkJarActivity, *DatabricksSparkPythonActivity, *DeleteActivity, *ExecuteDataFlowActivity, +// - *ExecutePipelineActivity, *ExecuteSSISPackageActivity, *ExecuteWranglingDataflowActivity, *ExecutionActivity, *FailActivity, +// - *FilterActivity, *ForEachActivity, *GetMetadataActivity, *HDInsightHiveActivity, *HDInsightMapReduceActivity, *HDInsightPigActivity, +// - *HDInsightSparkActivity, *HDInsightStreamingActivity, *IfConditionActivity, *LookupActivity, *SQLServerStoredProcedureActivity, +// - *ScriptActivity, *SetVariableActivity, *SwitchActivity, *SynapseNotebookActivity, *SynapseSparkJobDefinitionActivity, +// - *UntilActivity, *ValidationActivity, *WaitActivity, *WebActivity, *WebHookActivity +type ActivityClassification interface { + // GetActivity returns the Activity content of the underlying type. + GetActivity() *Activity +} + +// CompressionReadSettingsClassification provides polymorphic access to related types. +// Call the interface's GetCompressionReadSettings() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *CompressionReadSettings, *TarGZipReadSettings, *TarReadSettings, *ZipDeflateReadSettings +type CompressionReadSettingsClassification interface { + // GetCompressionReadSettings returns the CompressionReadSettings content of the underlying type. + GetCompressionReadSettings() *CompressionReadSettings +} + +// ControlActivityClassification provides polymorphic access to related types. +// Call the interface's GetControlActivity() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AppendVariableActivity, *ControlActivity, *ExecutePipelineActivity, *FailActivity, *FilterActivity, *ForEachActivity, +// - *IfConditionActivity, *SetVariableActivity, *SwitchActivity, *UntilActivity, *ValidationActivity, *WaitActivity, *WebHookActivity +type ControlActivityClassification interface { + ActivityClassification + // GetControlActivity returns the ControlActivity content of the underlying type. + GetControlActivity() *ControlActivity +} + +// CopySinkClassification provides polymorphic access to related types. +// Call the interface's GetCopySink() method to access the common type. +// Use a type switch to determine the concrete type. 
The possible types are: +// - *AvroSink, *AzureBlobFSSink, *AzureDataExplorerSink, *AzureDataLakeStoreSink, *AzureDatabricksDeltaLakeSink, *AzureMySQLSink, +// - *AzurePostgreSQLSink, *AzureQueueSink, *AzureSQLSink, *AzureSearchIndexSink, *AzureTableSink, *BinarySink, *BlobSink, +// - *CommonDataServiceForAppsSink, *CopySink, *CosmosDbMongoDbAPISink, *CosmosDbSQLAPISink, *DelimitedTextSink, *DocumentDbCollectionSink, +// - *DynamicsCrmSink, *DynamicsSink, *FileSystemSink, *InformixSink, *JSONSink, *MicrosoftAccessSink, *MongoDbAtlasSink, +// - *MongoDbV2Sink, *OdbcSink, *OracleSink, *OrcSink, *ParquetSink, *RestSink, *SQLDWSink, *SQLMISink, *SQLServerSink, *SQLSink, +// - *SalesforceServiceCloudSink, *SalesforceSink, *SapCloudForCustomerSink, *SnowflakeSink +type CopySinkClassification interface { + // GetCopySink returns the CopySink content of the underlying type. + GetCopySink() *CopySink +} + +// CopySourceClassification provides polymorphic access to related types. +// Call the interface's GetCopySource() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AmazonMWSSource, *AmazonRdsForOracleSource, *AmazonRdsForSQLServerSource, *AmazonRedshiftSource, *AvroSource, *AzureBlobFSSource, +// - *AzureDataExplorerSource, *AzureDataLakeStoreSource, *AzureDatabricksDeltaLakeSource, *AzureMariaDBSource, *AzureMySQLSource, +// - *AzurePostgreSQLSource, *AzureSQLSource, *AzureTableSource, *BinarySource, *BlobSource, *CassandraSource, *CommonDataServiceForAppsSource, +// - *ConcurSource, *CopySource, *CosmosDbMongoDbAPISource, *CosmosDbSQLAPISource, *CouchbaseSource, *Db2Source, *DelimitedTextSource, +// - *DocumentDbCollectionSource, *DrillSource, *DynamicsAXSource, *DynamicsCrmSource, *DynamicsSource, *EloquaSource, *ExcelSource, +// - *FileSystemSource, *GoogleAdWordsSource, *GoogleBigQuerySource, *GreenplumSource, *HBaseSource, *HTTPSource, *HdfsSource, +// - *HiveSource, *HubspotSource, *ImpalaSource, *InformixSource, *JSONSource, *JiraSource, *MagentoSource, *MariaDBSource, +// - *MarketoSource, *MicrosoftAccessSource, *MongoDbAtlasSource, *MongoDbSource, *MongoDbV2Source, *MySQLSource, *NetezzaSource, +// - *ODataSource, *OdbcSource, *Office365Source, *OracleServiceCloudSource, *OracleSource, *OrcSource, *ParquetSource, *PaypalSource, +// - *PhoenixSource, *PostgreSQLSource, *PrestoSource, *QuickBooksSource, *RelationalSource, *ResponsysSource, *RestSource, +// - *SQLDWSource, *SQLMISource, *SQLServerSource, *SQLSource, *SalesforceMarketingCloudSource, *SalesforceServiceCloudSource, +// - *SalesforceSource, *SapBwSource, *SapCloudForCustomerSource, *SapEccSource, *SapHanaSource, *SapOdpSource, *SapOpenHubSource, +// - *SapTableSource, *ServiceNowSource, *SharePointOnlineListSource, *ShopifySource, *SnowflakeSource, *SparkSource, *SquareSource, +// - *SybaseSource, *TabularSource, *TeradataSource, *VerticaSource, *WebSource, *XMLSource, *XeroSource, *ZohoSource +type CopySourceClassification interface { + // GetCopySource returns the CopySource content of the underlying type. + GetCopySource() *CopySource +} + +// CredentialClassification provides polymorphic access to related types. +// Call the interface's GetCredential() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *Credential, *ManagedIdentityCredential, *ServicePrincipalCredential +type CredentialClassification interface { + // GetCredential returns the Credential content of the underlying type. 
+ GetCredential() *Credential +} + +// CustomSetupBaseClassification provides polymorphic access to related types. +// Call the interface's GetCustomSetupBase() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AzPowerShellSetup, *CmdkeySetup, *ComponentSetup, *CustomSetupBase, *EnvironmentVariableSetup +type CustomSetupBaseClassification interface { + // GetCustomSetupBase returns the CustomSetupBase content of the underlying type. + GetCustomSetupBase() *CustomSetupBase +} + +// DataFlowClassification provides polymorphic access to related types. +// Call the interface's GetDataFlow() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *DataFlow, *Flowlet, *MappingDataFlow, *WranglingDataFlow +type DataFlowClassification interface { + // GetDataFlow returns the DataFlow content of the underlying type. + GetDataFlow() *DataFlow +} + +// DatasetClassification provides polymorphic access to related types. +// Call the interface's GetDataset() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AmazonMWSObjectDataset, *AmazonRdsForOracleTableDataset, *AmazonRdsForSQLServerTableDataset, *AmazonRedshiftTableDataset, +// - *AmazonS3Dataset, *AvroDataset, *AzureBlobDataset, *AzureBlobFSDataset, *AzureDataExplorerTableDataset, *AzureDataLakeStoreDataset, +// - *AzureDatabricksDeltaLakeDataset, *AzureMariaDBTableDataset, *AzureMySQLTableDataset, *AzurePostgreSQLTableDataset, *AzureSQLDWTableDataset, +// - *AzureSQLMITableDataset, *AzureSQLTableDataset, *AzureSearchIndexDataset, *AzureTableDataset, *BinaryDataset, *CassandraTableDataset, +// - *CommonDataServiceForAppsEntityDataset, *ConcurObjectDataset, *CosmosDbMongoDbAPICollectionDataset, *CosmosDbSQLAPICollectionDataset, +// - *CouchbaseTableDataset, *CustomDataset, *Dataset, *Db2TableDataset, *DelimitedTextDataset, *DocumentDbCollectionDataset, +// - *DrillTableDataset, *DynamicsAXResourceDataset, *DynamicsCrmEntityDataset, *DynamicsEntityDataset, *EloquaObjectDataset, +// - *ExcelDataset, *FileShareDataset, *GoogleAdWordsObjectDataset, *GoogleBigQueryObjectDataset, *GreenplumTableDataset, +// - *HBaseObjectDataset, *HTTPDataset, *HiveObjectDataset, *HubspotObjectDataset, *ImpalaObjectDataset, *InformixTableDataset, +// - *JSONDataset, *JiraObjectDataset, *MagentoObjectDataset, *MariaDBTableDataset, *MarketoObjectDataset, *MicrosoftAccessTableDataset, +// - *MongoDbAtlasCollectionDataset, *MongoDbCollectionDataset, *MongoDbV2CollectionDataset, *MySQLTableDataset, *NetezzaTableDataset, +// - *ODataResourceDataset, *OdbcTableDataset, *Office365Dataset, *OracleServiceCloudObjectDataset, *OracleTableDataset, *OrcDataset, +// - *ParquetDataset, *PaypalObjectDataset, *PhoenixObjectDataset, *PostgreSQLTableDataset, *PrestoObjectDataset, *QuickBooksObjectDataset, +// - *RelationalTableDataset, *ResponsysObjectDataset, *RestResourceDataset, *SQLServerTableDataset, *SalesforceMarketingCloudObjectDataset, +// - *SalesforceObjectDataset, *SalesforceServiceCloudObjectDataset, *SapBwCubeDataset, *SapCloudForCustomerResourceDataset, +// - *SapEccResourceDataset, *SapHanaTableDataset, *SapOdpResourceDataset, *SapOpenHubTableDataset, *SapTableResourceDataset, +// - *ServiceNowObjectDataset, *SharePointOnlineListResourceDataset, *ShopifyObjectDataset, *SnowflakeDataset, *SparkObjectDataset, +// - *SquareObjectDataset, *SybaseTableDataset, *TeradataTableDataset, 
*VerticaTableDataset, *WebTableDataset, *XMLDataset, +// - *XeroObjectDataset, *ZohoObjectDataset +type DatasetClassification interface { + // GetDataset returns the Dataset content of the underlying type. + GetDataset() *Dataset +} + +// DatasetLocationClassification provides polymorphic access to related types. +// Call the interface's GetDatasetLocation() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AmazonS3CompatibleLocation, *AmazonS3Location, *AzureBlobFSLocation, *AzureBlobStorageLocation, *AzureDataLakeStoreLocation, +// - *AzureFileStorageLocation, *DatasetLocation, *FileServerLocation, *FtpServerLocation, *GoogleCloudStorageLocation, *HTTPServerLocation, +// - *HdfsLocation, *OracleCloudStorageLocation, *SftpLocation +type DatasetLocationClassification interface { + // GetDatasetLocation returns the DatasetLocation content of the underlying type. + GetDatasetLocation() *DatasetLocation +} + +// DatasetStorageFormatClassification provides polymorphic access to related types. +// Call the interface's GetDatasetStorageFormat() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AvroFormat, *DatasetStorageFormat, *JSONFormat, *OrcFormat, *ParquetFormat, *TextFormat +type DatasetStorageFormatClassification interface { + // GetDatasetStorageFormat returns the DatasetStorageFormat content of the underlying type. + GetDatasetStorageFormat() *DatasetStorageFormat +} + +// DependencyReferenceClassification provides polymorphic access to related types. +// Call the interface's GetDependencyReference() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *DependencyReference, *SelfDependencyTumblingWindowTriggerReference, *TriggerDependencyReference, *TumblingWindowTriggerDependencyReference +type DependencyReferenceClassification interface { + // GetDependencyReference returns the DependencyReference content of the underlying type. + GetDependencyReference() *DependencyReference +} + +// ExecutionActivityClassification provides polymorphic access to related types. +// Call the interface's GetExecutionActivity() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AzureDataExplorerCommandActivity, *AzureFunctionActivity, *AzureMLBatchExecutionActivity, *AzureMLExecutePipelineActivity, +// - *AzureMLUpdateResourceActivity, *CopyActivity, *CustomActivity, *DataLakeAnalyticsUSQLActivity, *DatabricksNotebookActivity, +// - *DatabricksSparkJarActivity, *DatabricksSparkPythonActivity, *DeleteActivity, *ExecuteDataFlowActivity, *ExecuteSSISPackageActivity, +// - *ExecutionActivity, *GetMetadataActivity, *HDInsightHiveActivity, *HDInsightMapReduceActivity, *HDInsightPigActivity, +// - *HDInsightSparkActivity, *HDInsightStreamingActivity, *LookupActivity, *SQLServerStoredProcedureActivity, *ScriptActivity, +// - *SynapseNotebookActivity, *SynapseSparkJobDefinitionActivity, *WebActivity +type ExecutionActivityClassification interface { + ActivityClassification + // GetExecutionActivity returns the ExecutionActivity content of the underlying type. + GetExecutionActivity() *ExecutionActivity +} + +// ExportSettingsClassification provides polymorphic access to related types. +// Call the interface's GetExportSettings() method to access the common type. +// Use a type switch to determine the concrete type. 
The possible types are: +// - *AzureDatabricksDeltaLakeExportCommand, *ExportSettings, *SnowflakeExportCopyCommand +type ExportSettingsClassification interface { + // GetExportSettings returns the ExportSettings content of the underlying type. + GetExportSettings() *ExportSettings +} + +// FactoryRepoConfigurationClassification provides polymorphic access to related types. +// Call the interface's GetFactoryRepoConfiguration() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *FactoryGitHubConfiguration, *FactoryRepoConfiguration, *FactoryVSTSConfiguration +type FactoryRepoConfigurationClassification interface { + // GetFactoryRepoConfiguration returns the FactoryRepoConfiguration content of the underlying type. + GetFactoryRepoConfiguration() *FactoryRepoConfiguration +} + +// FormatReadSettingsClassification provides polymorphic access to related types. +// Call the interface's GetFormatReadSettings() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *BinaryReadSettings, *DelimitedTextReadSettings, *FormatReadSettings, *JSONReadSettings, *ParquetReadSettings, *XMLReadSettings +type FormatReadSettingsClassification interface { + // GetFormatReadSettings returns the FormatReadSettings content of the underlying type. + GetFormatReadSettings() *FormatReadSettings +} + +// FormatWriteSettingsClassification provides polymorphic access to related types. +// Call the interface's GetFormatWriteSettings() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AvroWriteSettings, *DelimitedTextWriteSettings, *FormatWriteSettings, *JSONWriteSettings, *OrcWriteSettings, *ParquetWriteSettings +type FormatWriteSettingsClassification interface { + // GetFormatWriteSettings returns the FormatWriteSettings content of the underlying type. + GetFormatWriteSettings() *FormatWriteSettings +} + +// ImportSettingsClassification provides polymorphic access to related types. +// Call the interface's GetImportSettings() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AzureDatabricksDeltaLakeImportCommand, *ImportSettings, *SnowflakeImportCopyCommand +type ImportSettingsClassification interface { + // GetImportSettings returns the ImportSettings content of the underlying type. + GetImportSettings() *ImportSettings +} + +// IntegrationRuntimeClassification provides polymorphic access to related types. +// Call the interface's GetIntegrationRuntime() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *IntegrationRuntime, *ManagedIntegrationRuntime, *SelfHostedIntegrationRuntime +type IntegrationRuntimeClassification interface { + // GetIntegrationRuntime returns the IntegrationRuntime content of the underlying type. + GetIntegrationRuntime() *IntegrationRuntime +} + +// IntegrationRuntimeStatusClassification provides polymorphic access to related types. +// Call the interface's GetIntegrationRuntimeStatus() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *IntegrationRuntimeStatus, *ManagedIntegrationRuntimeStatus, *SelfHostedIntegrationRuntimeStatus +type IntegrationRuntimeStatusClassification interface { + // GetIntegrationRuntimeStatus returns the IntegrationRuntimeStatus content of the underlying type. 
+ GetIntegrationRuntimeStatus() *IntegrationRuntimeStatus +} + +// LinkedIntegrationRuntimeTypeClassification provides polymorphic access to related types. +// Call the interface's GetLinkedIntegrationRuntimeType() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *LinkedIntegrationRuntimeKeyAuthorization, *LinkedIntegrationRuntimeRbacAuthorization, *LinkedIntegrationRuntimeType +type LinkedIntegrationRuntimeTypeClassification interface { + // GetLinkedIntegrationRuntimeType returns the LinkedIntegrationRuntimeType content of the underlying type. + GetLinkedIntegrationRuntimeType() *LinkedIntegrationRuntimeType +} + +// LinkedServiceClassification provides polymorphic access to related types. +// Call the interface's GetLinkedService() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AmazonMWSLinkedService, *AmazonRdsForOracleLinkedService, *AmazonRdsForSQLServerLinkedService, *AmazonRedshiftLinkedService, +// - *AmazonS3CompatibleLinkedService, *AmazonS3LinkedService, *AppFiguresLinkedService, *AsanaLinkedService, *AzureBatchLinkedService, +// - *AzureBlobFSLinkedService, *AzureBlobStorageLinkedService, *AzureDataExplorerLinkedService, *AzureDataLakeAnalyticsLinkedService, +// - *AzureDataLakeStoreLinkedService, *AzureDatabricksDeltaLakeLinkedService, *AzureDatabricksLinkedService, *AzureFileStorageLinkedService, +// - *AzureFunctionLinkedService, *AzureKeyVaultLinkedService, *AzureMLLinkedService, *AzureMLServiceLinkedService, *AzureMariaDBLinkedService, +// - *AzureMySQLLinkedService, *AzurePostgreSQLLinkedService, *AzureSQLDWLinkedService, *AzureSQLDatabaseLinkedService, *AzureSQLMILinkedService, +// - *AzureSearchLinkedService, *AzureStorageLinkedService, *AzureSynapseArtifactsLinkedService, *AzureTableStorageLinkedService, +// - *CassandraLinkedService, *CommonDataServiceForAppsLinkedService, *ConcurLinkedService, *CosmosDbLinkedService, *CosmosDbMongoDbAPILinkedService, +// - *CouchbaseLinkedService, *CustomDataSourceLinkedService, *DataworldLinkedService, *Db2LinkedService, *DrillLinkedService, +// - *DynamicsAXLinkedService, *DynamicsCrmLinkedService, *DynamicsLinkedService, *EloquaLinkedService, *FileServerLinkedService, +// - *FtpServerLinkedService, *GoogleAdWordsLinkedService, *GoogleBigQueryLinkedService, *GoogleCloudStorageLinkedService, +// - *GoogleSheetsLinkedService, *GreenplumLinkedService, *HBaseLinkedService, *HDInsightLinkedService, *HDInsightOnDemandLinkedService, +// - *HTTPLinkedService, *HdfsLinkedService, *HiveLinkedService, *HubspotLinkedService, *ImpalaLinkedService, *InformixLinkedService, +// - *JiraLinkedService, *LinkedService, *MagentoLinkedService, *MariaDBLinkedService, *MarketoLinkedService, *MicrosoftAccessLinkedService, +// - *MongoDbAtlasLinkedService, *MongoDbLinkedService, *MongoDbV2LinkedService, *MySQLLinkedService, *NetezzaLinkedService, +// - *ODataLinkedService, *OdbcLinkedService, *Office365LinkedService, *OracleCloudStorageLinkedService, *OracleLinkedService, +// - *OracleServiceCloudLinkedService, *PaypalLinkedService, *PhoenixLinkedService, *PostgreSQLLinkedService, *PrestoLinkedService, +// - *QuickBooksLinkedService, *QuickbaseLinkedService, *ResponsysLinkedService, *RestServiceLinkedService, *SQLServerLinkedService, +// - *SalesforceLinkedService, *SalesforceMarketingCloudLinkedService, *SalesforceServiceCloudLinkedService, *SapBWLinkedService, +// - *SapCloudForCustomerLinkedService, 
*SapEccLinkedService, *SapHanaLinkedService, *SapOdpLinkedService, *SapOpenHubLinkedService, +// - *SapTableLinkedService, *ServiceNowLinkedService, *SftpServerLinkedService, *SharePointOnlineListLinkedService, *ShopifyLinkedService, +// - *SmartsheetLinkedService, *SnowflakeLinkedService, *SparkLinkedService, *SquareLinkedService, *SybaseLinkedService, *TeamDeskLinkedService, +// - *TeradataLinkedService, *TwilioLinkedService, *VerticaLinkedService, *WebLinkedService, *XeroLinkedService, *ZendeskLinkedService, +// - *ZohoLinkedService +type LinkedServiceClassification interface { + // GetLinkedService returns the LinkedService content of the underlying type. + GetLinkedService() *LinkedService +} + +// MultiplePipelineTriggerClassification provides polymorphic access to related types. +// Call the interface's GetMultiplePipelineTrigger() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *BlobEventsTrigger, *BlobTrigger, *CustomEventsTrigger, *MultiplePipelineTrigger, *ScheduleTrigger +type MultiplePipelineTriggerClassification interface { + TriggerClassification + // GetMultiplePipelineTrigger returns the MultiplePipelineTrigger content of the underlying type. + GetMultiplePipelineTrigger() *MultiplePipelineTrigger +} + +// SecretBaseClassification provides polymorphic access to related types. +// Call the interface's GetSecretBase() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AzureKeyVaultSecretReference, *SecretBase, *SecureString +type SecretBaseClassification interface { + // GetSecretBase returns the SecretBase content of the underlying type. + GetSecretBase() *SecretBase +} + +// SsisObjectMetadataClassification provides polymorphic access to related types. +// Call the interface's GetSsisObjectMetadata() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *SsisEnvironment, *SsisFolder, *SsisObjectMetadata, *SsisPackage, *SsisProject +type SsisObjectMetadataClassification interface { + // GetSsisObjectMetadata returns the SsisObjectMetadata content of the underlying type. + GetSsisObjectMetadata() *SsisObjectMetadata +} + +// StoreReadSettingsClassification provides polymorphic access to related types. +// Call the interface's GetStoreReadSettings() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AmazonS3CompatibleReadSettings, *AmazonS3ReadSettings, *AzureBlobFSReadSettings, *AzureBlobStorageReadSettings, *AzureDataLakeStoreReadSettings, +// - *AzureFileStorageReadSettings, *FileServerReadSettings, *FtpReadSettings, *GoogleCloudStorageReadSettings, *HTTPReadSettings, +// - *HdfsReadSettings, *OracleCloudStorageReadSettings, *SftpReadSettings, *StoreReadSettings +type StoreReadSettingsClassification interface { + // GetStoreReadSettings returns the StoreReadSettings content of the underlying type. + GetStoreReadSettings() *StoreReadSettings +} + +// StoreWriteSettingsClassification provides polymorphic access to related types. +// Call the interface's GetStoreWriteSettings() method to access the common type. +// Use a type switch to determine the concrete type. 
The possible types are: +// - *AzureBlobFSWriteSettings, *AzureBlobStorageWriteSettings, *AzureDataLakeStoreWriteSettings, *AzureFileStorageWriteSettings, +// - *FileServerWriteSettings, *SftpWriteSettings, *StoreWriteSettings +type StoreWriteSettingsClassification interface { + // GetStoreWriteSettings returns the StoreWriteSettings content of the underlying type. + GetStoreWriteSettings() *StoreWriteSettings +} + +// TabularSourceClassification provides polymorphic access to related types. +// Call the interface's GetTabularSource() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *AmazonMWSSource, *AmazonRdsForSQLServerSource, *AmazonRedshiftSource, *AzureMariaDBSource, *AzureMySQLSource, *AzurePostgreSQLSource, +// - *AzureSQLSource, *AzureTableSource, *CassandraSource, *ConcurSource, *CouchbaseSource, *Db2Source, *DrillSource, *DynamicsAXSource, +// - *EloquaSource, *GoogleAdWordsSource, *GoogleBigQuerySource, *GreenplumSource, *HBaseSource, *HiveSource, *HubspotSource, +// - *ImpalaSource, *InformixSource, *JiraSource, *MagentoSource, *MariaDBSource, *MarketoSource, *MySQLSource, *NetezzaSource, +// - *OdbcSource, *OracleServiceCloudSource, *PaypalSource, *PhoenixSource, *PostgreSQLSource, *PrestoSource, *QuickBooksSource, +// - *ResponsysSource, *SQLDWSource, *SQLMISource, *SQLServerSource, *SQLSource, *SalesforceMarketingCloudSource, *SalesforceSource, +// - *SapBwSource, *SapCloudForCustomerSource, *SapEccSource, *SapHanaSource, *SapOdpSource, *SapOpenHubSource, *SapTableSource, +// - *ServiceNowSource, *ShopifySource, *SparkSource, *SquareSource, *SybaseSource, *TabularSource, *TeradataSource, *VerticaSource, +// - *XeroSource, *ZohoSource +type TabularSourceClassification interface { + CopySourceClassification + // GetTabularSource returns the TabularSource content of the underlying type. + GetTabularSource() *TabularSource +} + +// TriggerClassification provides polymorphic access to related types. +// Call the interface's GetTrigger() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *BlobEventsTrigger, *BlobTrigger, *ChainingTrigger, *CustomEventsTrigger, *MultiplePipelineTrigger, *RerunTumblingWindowTrigger, +// - *ScheduleTrigger, *Trigger, *TumblingWindowTrigger +type TriggerClassification interface { + // GetTrigger returns the Trigger content of the underlying type. + GetTrigger() *Trigger +} + +// TriggerDependencyReferenceClassification provides polymorphic access to related types. +// Call the interface's GetTriggerDependencyReference() method to access the common type. +// Use a type switch to determine the concrete type. The possible types are: +// - *TriggerDependencyReference, *TumblingWindowTriggerDependencyReference +type TriggerDependencyReferenceClassification interface { + DependencyReferenceClassification + // GetTriggerDependencyReference returns the TriggerDependencyReference content of the underlying type. + GetTriggerDependencyReference() *TriggerDependencyReference +} + +// WebLinkedServiceTypePropertiesClassification provides polymorphic access to related types. +// Call the interface's GetWebLinkedServiceTypeProperties() method to access the common type. +// Use a type switch to determine the concrete type. 
The possible types are: +// - *WebAnonymousAuthentication, *WebBasicAuthentication, *WebClientCertificateAuthentication, *WebLinkedServiceTypeProperties +type WebLinkedServiceTypePropertiesClassification interface { + // GetWebLinkedServiceTypeProperties returns the WebLinkedServiceTypeProperties content of the underlying type. + GetWebLinkedServiceTypeProperties() *WebLinkedServiceTypeProperties +} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/linkedservices_client.go b/sdk/resourcemanager/datafactory/armdatafactory/linkedservices_client.go index 916a55c11fa8..3195b2b6b2a1 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/linkedservices_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/linkedservices_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -55,18 +54,21 @@ func NewLinkedServicesClient(subscriptionID string, credential azcore.TokenCrede // - options - LinkedServicesClientCreateOrUpdateOptions contains the optional parameters for the LinkedServicesClient.CreateOrUpdate // method. func (client *LinkedServicesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, linkedServiceName string, linkedService LinkedServiceResource, options *LinkedServicesClientCreateOrUpdateOptions) (LinkedServicesClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, linkedServiceName, linkedService, options) if err != nil { return LinkedServicesClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return LinkedServicesClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return LinkedServicesClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return LinkedServicesClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -99,7 +101,10 @@ func (client *LinkedServicesClient) createOrUpdateCreateRequest(ctx context.Cont req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, linkedService) + if err := runtime.MarshalAsJSON(req, linkedService); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -120,16 +125,18 @@ func (client *LinkedServicesClient) createOrUpdateHandleResponse(resp *http.Resp // - linkedServiceName - The linked service name. // - options - LinkedServicesClientDeleteOptions contains the optional parameters for the LinkedServicesClient.Delete method. 
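The classification interfaces defined in interfaces.go above are meant to be consumed with a type switch, as their doc comments describe. Below is a minimal sketch of that pattern for LinkedServiceClassification; it assumes the v3 module path used by this package, and the helper name and the two concrete cases shown are illustrative only.

package sample

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3"
)

// describeLinkedService reads the common LinkedService fields through the
// GetLinkedService accessor, then type-switches to reach concrete properties.
func describeLinkedService(ls armdatafactory.LinkedServiceClassification) string {
	base := ls.GetLinkedService()
	desc := ""
	if base.Description != nil {
		desc = *base.Description
	}
	switch v := ls.(type) {
	case *armdatafactory.AzureStorageLinkedService:
		_ = v.TypeProperties // storage-specific properties are available here
		return fmt.Sprintf("AzureStorage linked service: %s", desc)
	case *armdatafactory.AzureBlobStorageLinkedService:
		return fmt.Sprintf("AzureBlobStorage linked service: %s", desc)
	default:
		return fmt.Sprintf("other linked service: %s", desc)
	}
}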
func (client *LinkedServicesClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, linkedServiceName string, options *LinkedServicesClientDeleteOptions) (LinkedServicesClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, linkedServiceName, options) if err != nil { return LinkedServicesClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return LinkedServicesClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return LinkedServicesClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return LinkedServicesClientDeleteResponse{}, err } return LinkedServicesClientDeleteResponse{}, nil } @@ -173,18 +180,21 @@ func (client *LinkedServicesClient) deleteCreateRequest(ctx context.Context, res // - linkedServiceName - The linked service name. // - options - LinkedServicesClientGetOptions contains the optional parameters for the LinkedServicesClient.Get method. func (client *LinkedServicesClient) Get(ctx context.Context, resourceGroupName string, factoryName string, linkedServiceName string, options *LinkedServicesClientGetOptions) (LinkedServicesClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, linkedServiceName, options) if err != nil { return LinkedServicesClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return LinkedServicesClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNotModified) { - return LinkedServicesClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNotModified) { + err = runtime.NewResponseError(httpResp) + return LinkedServicesClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/linkedservices_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/linkedservices_client_example_test.go deleted file mode 100644 index 19160d465624..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/linkedservices_client_example_test.go +++ /dev/null @@ -1,212 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
- -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_ListByFactory.json -func ExampleLinkedServicesClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewLinkedServicesClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.LinkedServiceListResponse = armdatafactory.LinkedServiceListResponse{ - // Value: []*armdatafactory.LinkedServiceResource{ - // { - // Name: to.Ptr("exampleLinkedService"), - // Type: to.Ptr("Microsoft.DataFactory/factories/linkedservices"), - // Etag: to.Ptr("0a0064d4-0000-0000-0000-5b245bd00000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/linkedservices/exampleLinkedService"), - // Properties: &armdatafactory.AzureStorageLinkedService{ - // Type: to.Ptr("AzureStorage"), - // Description: to.Ptr("Example description"), - // TypeProperties: &armdatafactory.AzureStorageLinkedServiceTypeProperties{ - // ConnectionString: map[string]any{ - // "type": "SecureString", - // "value": "**********", - // }, - // EncryptedCredential: to.Ptr("ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkRGLURPR0ZPT0QtWUFOWkhBTkctV1VfMGI2M2EyMmYtMGEzNC00NDg2LWIzMDktNzM0NTlkODUyY2Q1Ig0KfQ=="), - // }, - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Create.json -func ExampleLinkedServicesClient_CreateOrUpdate_linkedServicesCreate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewLinkedServicesClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", armdatafactory.LinkedServiceResource{ - Properties: &armdatafactory.AzureStorageLinkedService{ - Type: to.Ptr("AzureStorage"), - TypeProperties: 
&armdatafactory.AzureStorageLinkedServiceTypeProperties{ - ConnectionString: map[string]any{ - "type": "SecureString", - "value": "DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=", - }, - }, - }, - }, &armdatafactory.LinkedServicesClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.LinkedServiceResource = armdatafactory.LinkedServiceResource{ - // Name: to.Ptr("exampleLinkedService"), - // Type: to.Ptr("Microsoft.DataFactory/factories/linkedservices"), - // Etag: to.Ptr("0a0062d4-0000-0000-0000-5b245bcf0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/linkedservices/exampleLinkedService"), - // Properties: &armdatafactory.AzureStorageLinkedService{ - // Type: to.Ptr("AzureStorage"), - // TypeProperties: &armdatafactory.AzureStorageLinkedServiceTypeProperties{ - // ConnectionString: map[string]any{ - // "type": "SecureString", - // "value": "**********", - // }, - // EncryptedCredential: to.Ptr("ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkRGLURPR0ZPT0QtWUFOWkhBTkctV1VfM2FiMTk0NjYtNWUxNi00NzU1LWJlNzktMjI2ZTVmZWU3YzY0Ig0KfQ=="), - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Update.json -func ExampleLinkedServicesClient_CreateOrUpdate_linkedServicesUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewLinkedServicesClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", armdatafactory.LinkedServiceResource{ - Properties: &armdatafactory.AzureStorageLinkedService{ - Type: to.Ptr("AzureStorage"), - Description: to.Ptr("Example description"), - TypeProperties: &armdatafactory.AzureStorageLinkedServiceTypeProperties{ - ConnectionString: map[string]any{ - "type": "SecureString", - "value": "DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=", - }, - }, - }, - }, &armdatafactory.LinkedServicesClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.LinkedServiceResource = armdatafactory.LinkedServiceResource{ - // Name: to.Ptr("exampleLinkedService"), - // Type: to.Ptr("Microsoft.DataFactory/factories/linkedservices"), - // Etag: to.Ptr("0a0064d4-0000-0000-0000-5b245bd00000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/linkedservices/exampleLinkedService"), - // Properties: &armdatafactory.AzureStorageLinkedService{ - // Type: to.Ptr("AzureStorage"), - // Description: to.Ptr("Example description"), - // TypeProperties: &armdatafactory.AzureStorageLinkedServiceTypeProperties{ - // ConnectionString: map[string]any{ - // "type": "SecureString", - // "value": "**********", - // }, - // EncryptedCredential: to.Ptr("ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkRGLURPR0ZPT0QtWUFOWkhBTkctV1VfMGI2M2EyMmYtMGEzNC00NDg2LWIzMDktNzM0NTlkODUyY2Q1Ig0KfQ=="), - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Get.json -func ExampleLinkedServicesClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewLinkedServicesClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", &armdatafactory.LinkedServicesClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.LinkedServiceResource = armdatafactory.LinkedServiceResource{ - // Name: to.Ptr("exampleLinkedService"), - // Type: to.Ptr("Microsoft.DataFactory/factories/linkedservices"), - // Etag: to.Ptr("1500474f-0000-0200-0000-5cbe090d0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/linkedservices/exampleLinkedService"), - // Properties: &armdatafactory.AzureStorageLinkedService{ - // Type: to.Ptr("AzureStorage"), - // Description: to.Ptr("Example description"), - // TypeProperties: &armdatafactory.AzureStorageLinkedServiceTypeProperties{ - // ConnectionString: map[string]any{ - // "type": "SecureString", - // "value": "**********", - // }, - // EncryptedCredential: to.Ptr("ew0KICAiVmVyc2lvbiI6ICIyMDE3LTExLTMwIiwNCiAgIlByb3RlY3Rpb25Nb2RlIjogIktleSIsDQogICJTZWNyZXRDb250ZW50VHlwZSI6ICJQbGFpbnRleHQiLA0KICAiQ3JlZGVudGlhbElkIjogIkhWVEVTVEVYQU1QTEVTXzg5MjU1NGY0LTViNGItNDFhOS1hYWYxLTg4ZWI5ZDBjOWIzMyINCn0="), - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/LinkedServices_Delete.json -func ExampleLinkedServicesClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewLinkedServicesClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleLinkedService", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/managedprivateendpoints_client.go b/sdk/resourcemanager/datafactory/armdatafactory/managedprivateendpoints_client.go index 8a2e5d339df9..25e5efdbea52 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/managedprivateendpoints_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/managedprivateendpoints_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -56,18 +55,21 @@ func NewManagedPrivateEndpointsClient(subscriptionID string, credential azcore.T // - options - ManagedPrivateEndpointsClientCreateOrUpdateOptions contains the optional parameters for the ManagedPrivateEndpointsClient.CreateOrUpdate // method. 
func (client *ManagedPrivateEndpointsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, managedVirtualNetworkName string, managedPrivateEndpointName string, managedPrivateEndpoint ManagedPrivateEndpointResource, options *ManagedPrivateEndpointsClientCreateOrUpdateOptions) (ManagedPrivateEndpointsClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, managedVirtualNetworkName, managedPrivateEndpointName, managedPrivateEndpoint, options) if err != nil { return ManagedPrivateEndpointsClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return ManagedPrivateEndpointsClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return ManagedPrivateEndpointsClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ManagedPrivateEndpointsClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -104,7 +106,10 @@ func (client *ManagedPrivateEndpointsClient) createOrUpdateCreateRequest(ctx con req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, managedPrivateEndpoint) + if err := runtime.MarshalAsJSON(req, managedPrivateEndpoint); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -127,16 +132,18 @@ func (client *ManagedPrivateEndpointsClient) createOrUpdateHandleResponse(resp * // - options - ManagedPrivateEndpointsClientDeleteOptions contains the optional parameters for the ManagedPrivateEndpointsClient.Delete // method. func (client *ManagedPrivateEndpointsClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, managedVirtualNetworkName string, managedPrivateEndpointName string, options *ManagedPrivateEndpointsClientDeleteOptions) (ManagedPrivateEndpointsClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, managedVirtualNetworkName, managedPrivateEndpointName, options) if err != nil { return ManagedPrivateEndpointsClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return ManagedPrivateEndpointsClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return ManagedPrivateEndpointsClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return ManagedPrivateEndpointsClientDeleteResponse{}, err } return ManagedPrivateEndpointsClientDeleteResponse{}, nil } @@ -186,18 +193,21 @@ func (client *ManagedPrivateEndpointsClient) deleteCreateRequest(ctx context.Con // - options - ManagedPrivateEndpointsClientGetOptions contains the optional parameters for the ManagedPrivateEndpointsClient.Get // method. 
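With the rewritten error paths above, a non-200 status is surfaced through runtime.NewResponseError, so callers can unwrap the returned error into *azcore.ResponseError. A minimal sketch of calling ManagedPrivateEndpointsClient.Get and inspecting such an error follows; the subscription ID is a placeholder and the resource names simply mirror the removed example tests.

package main

import (
	"context"
	"errors"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3"
)

func main() {
	cred, err := azidentity.NewDefaultAzureCredential(nil)
	if err != nil {
		log.Fatalf("failed to obtain a credential: %v", err)
	}
	clientFactory, err := armdatafactory.NewClientFactory("<subscription-id>", cred, nil)
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}
	res, err := clientFactory.NewManagedPrivateEndpointsClient().Get(context.Background(),
		"exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName",
		"exampleManagedPrivateEndpointName", nil)
	if err != nil {
		// Errors built by runtime.NewResponseError unwrap into *azcore.ResponseError.
		var respErr *azcore.ResponseError
		if errors.As(err, &respErr) {
			log.Fatalf("service returned %d (%s)", respErr.StatusCode, respErr.ErrorCode)
		}
		log.Fatalf("request failed: %v", err)
	}
	_ = res.ManagedPrivateEndpointResource
}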
func (client *ManagedPrivateEndpointsClient) Get(ctx context.Context, resourceGroupName string, factoryName string, managedVirtualNetworkName string, managedPrivateEndpointName string, options *ManagedPrivateEndpointsClientGetOptions) (ManagedPrivateEndpointsClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, managedVirtualNetworkName, managedPrivateEndpointName, options) if err != nil { return ManagedPrivateEndpointsClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return ManagedPrivateEndpointsClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return ManagedPrivateEndpointsClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ManagedPrivateEndpointsClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/managedprivateendpoints_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/managedprivateendpoints_client_example_test.go deleted file mode 100644 index 500210c3a409..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/managedprivateendpoints_client_example_test.go +++ /dev/null @@ -1,164 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_ListByFactory.json -func ExampleManagedPrivateEndpointsClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewManagedPrivateEndpointsClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // page.ManagedPrivateEndpointListResponse = armdatafactory.ManagedPrivateEndpointListResponse{ - // Value: []*armdatafactory.ManagedPrivateEndpointResource{ - // { - // Name: to.Ptr("exampleManagedPrivateEndpointName"), - // Type: to.Ptr("Microsoft.DataFactory/factories/managedVirtualNetworks/managedPrivateEndpoints"), - // Etag: to.Ptr("000046c4-0000-0000-0000-5b2198bf0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/managedVirtualNetworks/exampleManagedVirtualNetworkName/managedPrivateEndpoints/exampleManagedPrivateEndpointName"), - // Properties: &armdatafactory.ManagedPrivateEndpoint{ - // ConnectionState: &armdatafactory.ConnectionStateProperties{ - // Description: to.Ptr(""), - // ActionsRequired: to.Ptr("None"), - // Status: to.Ptr("Pending"), - // }, - // Fqdns: []*string{ - // }, - // GroupID: to.Ptr("blob"), - // PrivateLinkResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage"), - // ProvisioningState: to.Ptr("Succeeded"), - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Create.json -func ExampleManagedPrivateEndpointsClient_CreateOrUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewManagedPrivateEndpointsClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", "exampleManagedPrivateEndpointName", armdatafactory.ManagedPrivateEndpointResource{ - Properties: &armdatafactory.ManagedPrivateEndpoint{ - Fqdns: []*string{}, - GroupID: to.Ptr("blob"), - PrivateLinkResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage"), - }, - }, &armdatafactory.ManagedPrivateEndpointsClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.ManagedPrivateEndpointResource = armdatafactory.ManagedPrivateEndpointResource{ - // Name: to.Ptr("exampleManagedPrivateEndpointName"), - // Type: to.Ptr("Microsoft.DataFactory/factories/managedVirtualNetworks/managedPrivateEndpoints"), - // Etag: to.Ptr("000046c4-0000-0000-0000-5b2198bf0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/managedVirtualNetworks/exampleManagedVirtualNetworkName/managedPrivateEndpoints/exampleManagedPrivateEndpointName"), - // Properties: &armdatafactory.ManagedPrivateEndpoint{ - // ConnectionState: &armdatafactory.ConnectionStateProperties{ - // Description: to.Ptr(""), - // ActionsRequired: to.Ptr("None"), - // Status: to.Ptr("Pending"), - // }, - // Fqdns: []*string{ - // }, - // GroupID: to.Ptr("blob"), - // PrivateLinkResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage"), - // ProvisioningState: to.Ptr("Succeeded"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Get.json -func ExampleManagedPrivateEndpointsClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewManagedPrivateEndpointsClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", "exampleManagedPrivateEndpointName", &armdatafactory.ManagedPrivateEndpointsClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.ManagedPrivateEndpointResource = armdatafactory.ManagedPrivateEndpointResource{ - // Name: to.Ptr("exampleManagedPrivateEndpointName"), - // Type: to.Ptr("Microsoft.DataFactory/factories/managedVirtualNetworks/managedPrivateEndpoints"), - // Etag: to.Ptr("000046c4-0000-0000-0000-5b2198bf0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/managedVirtualNetworks/exampleManagedVirtualNetworkName/managedPrivateEndpoints/exampleManagedPrivateEndpointName"), - // Properties: &armdatafactory.ManagedPrivateEndpoint{ - // ConnectionState: &armdatafactory.ConnectionStateProperties{ - // Description: to.Ptr(""), - // ActionsRequired: to.Ptr("None"), - // Status: to.Ptr("Pending"), - // }, - // Fqdns: []*string{ - // }, - // GroupID: to.Ptr("blob"), - // PrivateLinkResourceID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage"), - // ProvisioningState: to.Ptr("Succeeded"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedPrivateEndpoints_Delete.json -func ExampleManagedPrivateEndpointsClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewManagedPrivateEndpointsClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", "exampleManagedPrivateEndpointName", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/managedvirtualnetworks_client.go b/sdk/resourcemanager/datafactory/armdatafactory/managedvirtualnetworks_client.go index 0d19559d6306..83202e08da16 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/managedvirtualnetworks_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/managedvirtualnetworks_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -55,18 +54,21 @@ func NewManagedVirtualNetworksClient(subscriptionID string, credential azcore.To // - options - ManagedVirtualNetworksClientCreateOrUpdateOptions contains the optional parameters for the ManagedVirtualNetworksClient.CreateOrUpdate // method. 
func (client *ManagedVirtualNetworksClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, managedVirtualNetworkName string, managedVirtualNetwork ManagedVirtualNetworkResource, options *ManagedVirtualNetworksClientCreateOrUpdateOptions) (ManagedVirtualNetworksClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, managedVirtualNetworkName, managedVirtualNetwork, options) if err != nil { return ManagedVirtualNetworksClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return ManagedVirtualNetworksClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return ManagedVirtualNetworksClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ManagedVirtualNetworksClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -99,7 +101,10 @@ func (client *ManagedVirtualNetworksClient) createOrUpdateCreateRequest(ctx cont req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, managedVirtualNetwork) + if err := runtime.MarshalAsJSON(req, managedVirtualNetwork); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -121,18 +126,21 @@ func (client *ManagedVirtualNetworksClient) createOrUpdateHandleResponse(resp *h // - options - ManagedVirtualNetworksClientGetOptions contains the optional parameters for the ManagedVirtualNetworksClient.Get // method. func (client *ManagedVirtualNetworksClient) Get(ctx context.Context, resourceGroupName string, factoryName string, managedVirtualNetworkName string, options *ManagedVirtualNetworksClientGetOptions) (ManagedVirtualNetworksClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, managedVirtualNetworkName, options) if err != nil { return ManagedVirtualNetworksClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return ManagedVirtualNetworksClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return ManagedVirtualNetworksClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return ManagedVirtualNetworksClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/managedvirtualnetworks_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/managedvirtualnetworks_client_example_test.go deleted file mode 100644 index b82a66448931..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/managedvirtualnetworks_client_example_test.go +++ /dev/null @@ -1,118 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. 
-// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedVirtualNetworks_ListByFactory.json -func ExampleManagedVirtualNetworksClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewManagedVirtualNetworksClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.ManagedVirtualNetworkListResponse = armdatafactory.ManagedVirtualNetworkListResponse{ - // Value: []*armdatafactory.ManagedVirtualNetworkResource{ - // { - // Name: to.Ptr("exampleManagedVirtualNetworkName"), - // Type: to.Ptr("Microsoft.DataFactory/factories/managedVirtualNetworks"), - // Etag: to.Ptr("0400f1a1-0000-0000-0000-5b2188640000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/managedVirtualNetworks/exampleManagedVirtualNetworkName"), - // Properties: &armdatafactory.ManagedVirtualNetwork{ - // Alias: to.Ptr("exampleFactoryName"), - // VNetID: to.Ptr("5a7bd944-87e6-454a-8d4d-9fba446514fd"), - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedVirtualNetworks_Create.json -func ExampleManagedVirtualNetworksClient_CreateOrUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewManagedVirtualNetworksClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", armdatafactory.ManagedVirtualNetworkResource{ - Properties: &armdatafactory.ManagedVirtualNetwork{}, - }, &armdatafactory.ManagedVirtualNetworksClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response 
here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.ManagedVirtualNetworkResource = armdatafactory.ManagedVirtualNetworkResource{ - // Name: to.Ptr("exampleManagedVirtualNetworkName"), - // Type: to.Ptr("Microsoft.DataFactory/factories/managedVirtualNetworks"), - // Etag: to.Ptr("000046c4-0000-0000-0000-5b2198bf0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/managedVirtualNetworks/exampleManagedVirtualNetworkName"), - // Properties: &armdatafactory.ManagedVirtualNetwork{ - // Alias: to.Ptr("exampleFactoryName"), - // VNetID: to.Ptr("12345678-1234-1234-1234-12345678123"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ManagedVirtualNetworks_Get.json -func ExampleManagedVirtualNetworksClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewManagedVirtualNetworksClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleManagedVirtualNetworkName", &armdatafactory.ManagedVirtualNetworksClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.ManagedVirtualNetworkResource = armdatafactory.ManagedVirtualNetworkResource{ - // Name: to.Ptr("exampleManagedVirtualNetworkName"), - // Type: to.Ptr("Microsoft.DataFactory/factories/managedVirtualNetworks"), - // Etag: to.Ptr("15003c4f-0000-0200-0000-5cbe090b0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/managedVirtualNetworks/exampleManagedVirtualNetworkName"), - // Properties: &armdatafactory.ManagedVirtualNetwork{ - // Alias: to.Ptr("exampleFactoryName"), - // VNetID: to.Ptr("5a7bd944-87e6-454a-8d4d-9fba446514fd"), - // }, - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/models.go b/sdk/resourcemanager/datafactory/armdatafactory/models.go index 9da16bb688d3..9a506386215d 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/models.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/models.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-// DO NOT EDIT. package armdatafactory @@ -23,22 +22,6 @@ type AccessPolicyResponse struct { Policy *UserAccessPolicy } -// ActivityClassification provides polymorphic access to related types. -// Call the interface's GetActivity() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *Activity, *AppendVariableActivity, *AzureDataExplorerCommandActivity, *AzureFunctionActivity, *AzureMLBatchExecutionActivity, -// - *AzureMLExecutePipelineActivity, *AzureMLUpdateResourceActivity, *ControlActivity, *CopyActivity, *CustomActivity, *DataLakeAnalyticsUSQLActivity, -// - *DatabricksNotebookActivity, *DatabricksSparkJarActivity, *DatabricksSparkPythonActivity, *DeleteActivity, *ExecuteDataFlowActivity, -// - *ExecutePipelineActivity, *ExecuteSSISPackageActivity, *ExecuteWranglingDataflowActivity, *ExecutionActivity, *FailActivity, -// - *FilterActivity, *ForEachActivity, *GetMetadataActivity, *HDInsightHiveActivity, *HDInsightMapReduceActivity, *HDInsightPigActivity, -// - *HDInsightSparkActivity, *HDInsightStreamingActivity, *IfConditionActivity, *LookupActivity, *SQLServerStoredProcedureActivity, -// - *ScriptActivity, *SetVariableActivity, *SwitchActivity, *SynapseNotebookActivity, *SynapseSparkJobDefinitionActivity, -// - *UntilActivity, *ValidationActivity, *WaitActivity, *WebActivity, *WebHookActivity -type ActivityClassification interface { - // GetActivity returns the Activity content of the underlying type. - GetActivity() *Activity -} - // Activity - A pipeline activity. type Activity struct { // REQUIRED; Activity name. @@ -56,6 +39,13 @@ type Activity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -142,12 +132,6 @@ type ActivityRun struct { Status *string } -// ActivityRunsClientQueryByPipelineRunOptions contains the optional parameters for the ActivityRunsClient.QueryByPipelineRun -// method. -type ActivityRunsClientQueryByPipelineRunOptions struct { - // placeholder for future optional parameters -} - // ActivityRunsQueryResponse - A list activity runs. type ActivityRunsQueryResponse struct { // REQUIRED; List of activity runs. @@ -190,12 +174,12 @@ type AmazonMWSLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AmazonMWSLinkedService. func (a *AmazonMWSLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -215,8 +199,8 @@ type AmazonMWSLinkedServiceTypeProperties struct { SellerID any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Amazon MWS authentication token. 
MwsAuthToken SecretBaseClassification @@ -272,15 +256,15 @@ type AmazonMWSObjectDataset struct { // GetDataset implements the DatasetClassification interface for type AmazonMWSObjectDataset. func (a *AmazonMWSObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -318,26 +302,26 @@ type AmazonMWSSource struct { // GetCopySource implements the CopySourceClassification interface for type AmazonMWSSource. func (a *AmazonMWSSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type AmazonMWSSource. func (a *AmazonMWSSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: a.QueryTimeout, AdditionalColumns: a.AdditionalColumns, - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + QueryTimeout: a.QueryTimeout, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -347,8 +331,8 @@ type AmazonRdsForLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password SecretBaseClassification @@ -381,12 +365,12 @@ type AmazonRdsForOracleLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AmazonRdsForOracleLinkedService. func (a *AmazonRdsForOracleLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -449,12 +433,12 @@ type AmazonRdsForOracleSource struct { // GetCopySource implements the CopySourceClassification interface for type AmazonRdsForOracleSource. 
func (a *AmazonRdsForOracleSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -495,15 +479,15 @@ type AmazonRdsForOracleTableDataset struct { // GetDataset implements the DatasetClassification interface for type AmazonRdsForOracleTableDataset. func (a *AmazonRdsForOracleTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -543,12 +527,12 @@ type AmazonRdsForSQLServerLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AmazonRdsForSQLServerLinkedService. func (a *AmazonRdsForSQLServerLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -561,8 +545,8 @@ type AmazonRdsForSQLServerLinkedServiceTypeProperties struct { AlwaysEncryptedSettings *SQLAlwaysEncryptedProperties // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The on-premises Windows authentication password. Password SecretBaseClassification @@ -586,6 +570,11 @@ type AmazonRdsForSQLServerSource struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any + // Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. + // The default value is ReadCommitted. Type: string (or + // Expression with resultType string). + IsolationLevel any + // The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). MaxConcurrentConnections any @@ -622,26 +611,26 @@ type AmazonRdsForSQLServerSource struct { // GetCopySource implements the CopySourceClassification interface for type AmazonRdsForSQLServerSource. 
func (a *AmazonRdsForSQLServerSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type AmazonRdsForSQLServerSource. func (a *AmazonRdsForSQLServerSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: a.QueryTimeout, AdditionalColumns: a.AdditionalColumns, - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + QueryTimeout: a.QueryTimeout, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -682,15 +671,15 @@ type AmazonRdsForSQLServerTableDataset struct { // GetDataset implements the DatasetClassification interface for type AmazonRdsForSQLServerTableDataset. func (a *AmazonRdsForSQLServerTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -730,12 +719,12 @@ type AmazonRedshiftLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AmazonRedshiftLinkedService. func (a *AmazonRedshiftLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -748,8 +737,8 @@ type AmazonRedshiftLinkedServiceTypeProperties struct { Server any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password of the Amazon Redshift source. Password SecretBaseClassification @@ -801,26 +790,26 @@ type AmazonRedshiftSource struct { // GetCopySource implements the CopySourceClassification interface for type AmazonRedshiftSource. 
func (a *AmazonRedshiftSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type AmazonRedshiftSource. func (a *AmazonRedshiftSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: a.QueryTimeout, AdditionalColumns: a.AdditionalColumns, - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + QueryTimeout: a.QueryTimeout, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -861,15 +850,15 @@ type AmazonRedshiftTableDataset struct { // GetDataset implements the DatasetClassification interface for type AmazonRedshiftTableDataset. func (a *AmazonRedshiftTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -912,12 +901,12 @@ type AmazonS3CompatibleLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AmazonS3CompatibleLinkedService. func (a *AmazonS3CompatibleLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -928,8 +917,8 @@ type AmazonS3CompatibleLinkedServiceTypeProperties struct { AccessKeyID any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // If true, use S3 path-style access instead of virtual hosted-style access. Default value is false. Type: boolean (or Expression // with resultType boolean). @@ -968,10 +957,10 @@ type AmazonS3CompatibleLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type AmazonS3CompatibleLocation. func (a *AmazonS3CompatibleLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: a.Type, - FolderPath: a.FolderPath, - FileName: a.FileName, AdditionalProperties: a.AdditionalProperties, + FileName: a.FileName, + FolderPath: a.FolderPath, + Type: a.Type, } } @@ -990,8 +979,8 @@ type AmazonS3CompatibleReadSettings struct { // If true, disable data store metrics collection. 
Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -1026,10 +1015,10 @@ type AmazonS3CompatibleReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type AmazonS3CompatibleReadSettings. func (a *AmazonS3CompatibleReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -1070,15 +1059,15 @@ type AmazonS3Dataset struct { // GetDataset implements the DatasetClassification interface for type AmazonS3Dataset. func (a *AmazonS3Dataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -1136,12 +1125,12 @@ type AmazonS3LinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AmazonS3LinkedService. func (a *AmazonS3LinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -1156,8 +1145,8 @@ type AmazonS3LinkedServiceTypeProperties struct { AuthenticationType any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The secret access key of the Amazon S3 Identity and Access Management (IAM) user. SecretAccessKey SecretBaseClassification @@ -1195,10 +1184,10 @@ type AmazonS3Location struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type AmazonS3Location. func (a *AmazonS3Location) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: a.Type, - FolderPath: a.FolderPath, - FileName: a.FileName, AdditionalProperties: a.AdditionalProperties, + FileName: a.FileName, + FolderPath: a.FolderPath, + Type: a.Type, } } @@ -1217,8 +1206,8 @@ type AmazonS3ReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. 
Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -1253,10 +1242,10 @@ type AmazonS3ReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type AmazonS3ReadSettings. func (a *AmazonS3ReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -1287,12 +1276,12 @@ type AppFiguresLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AppFiguresLinkedService. func (a *AppFiguresLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -1304,7 +1293,7 @@ type AppFiguresLinkedServiceTypeProperties struct { // REQUIRED; The password of the AppFigures source. Password SecretBaseClassification - // REQUIRED; The username of the Appfigures source. + // REQUIRED; The username of the Appfigures source. Type: string (or Expression with resultType string). UserName any } @@ -1328,6 +1317,13 @@ type AppendVariableActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -1335,30 +1331,35 @@ type AppendVariableActivity struct { // GetActivity implements the ActivityClassification interface for type AppendVariableActivity. func (a *AppendVariableActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type AppendVariableActivity. func (a *AppendVariableActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } // AppendVariableActivityTypeProperties - AppendVariable activity properties. type AppendVariableActivityTypeProperties struct { - // Value to be appended. 
Could be a static value or Expression + // Value to be appended. Type: could be a static value matching type of the variable item or Expression with resultType matching + // type of the variable item Value any // Name of the variable whose value needs to be appended to. @@ -1398,12 +1399,12 @@ type AsanaLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AsanaLinkedService. func (a *AsanaLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -1413,8 +1414,8 @@ type AsanaLinkedServiceTypeProperties struct { APIToken SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string } // AvroDataset - Avro dataset. @@ -1454,15 +1455,15 @@ type AvroDataset struct { // GetDataset implements the DatasetClassification interface for type AvroDataset. func (a *AvroDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -1494,10 +1495,10 @@ type AvroFormat struct { // GetDatasetStorageFormat implements the DatasetStorageFormatClassification interface for type AvroFormat. func (a *AvroFormat) GetDatasetStorageFormat() *DatasetStorageFormat { return &DatasetStorageFormat{ - Type: a.Type, - Serializer: a.Serializer, - Deserializer: a.Deserializer, AdditionalProperties: a.AdditionalProperties, + Deserializer: a.Deserializer, + Serializer: a.Serializer, + Type: a.Type, } } @@ -1537,14 +1538,14 @@ type AvroSink struct { // GetCopySink implements the CopySinkClassification interface for type AvroSink. func (a *AvroSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -1579,12 +1580,12 @@ type AvroSource struct { // GetCopySource implements the CopySourceClassification interface for type AvroSource. 
func (a *AvroSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -1614,8 +1615,8 @@ type AvroWriteSettings struct { // GetFormatWriteSettings implements the FormatWriteSettingsClassification interface for type AvroWriteSettings. func (a *AvroWriteSettings) GetFormatWriteSettings() *FormatWriteSettings { return &FormatWriteSettings{ - Type: a.Type, AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -1668,12 +1669,12 @@ type AzureBatchLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureBatchLinkedService. func (a *AzureBatchLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -1698,8 +1699,8 @@ type AzureBatchLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string } // AzureBlobDataset - The Azure Blob storage. @@ -1739,15 +1740,15 @@ type AzureBlobDataset struct { // GetDataset implements the DatasetClassification interface for type AzureBlobDataset. func (a *AzureBlobDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -1812,15 +1813,15 @@ type AzureBlobFSDataset struct { // GetDataset implements the DatasetClassification interface for type AzureBlobFSDataset. func (a *AzureBlobFSDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -1866,12 +1867,12 @@ type AzureBlobFSLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureBlobFSLinkedService. 
func (a *AzureBlobFSLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -1889,8 +1890,8 @@ type AzureBlobFSLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of sasToken in sas uri. SasToken SecretBaseClassification @@ -1944,10 +1945,10 @@ type AzureBlobFSLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type AzureBlobFSLocation. func (a *AzureBlobFSLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: a.Type, - FolderPath: a.FolderPath, - FileName: a.FileName, AdditionalProperties: a.AdditionalProperties, + FileName: a.FileName, + FolderPath: a.FolderPath, + Type: a.Type, } } @@ -1966,8 +1967,8 @@ type AzureBlobFSReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -1999,10 +2000,10 @@ type AzureBlobFSReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type AzureBlobFSReadSettings. func (a *AzureBlobFSReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -2014,7 +2015,7 @@ type AzureBlobFSSink struct { // OPTIONAL; Contains additional key/value pairs not defined in the schema. AdditionalProperties map[string]any - // The type of copy behavior for copy sink. + // The type of copy behavior for copy sink. Type: string (or Expression with resultType string). CopyBehavior any // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). @@ -2042,14 +2043,14 @@ type AzureBlobFSSink struct { // GetCopySink implements the CopySinkClassification interface for type AzureBlobFSSink. 
func (a *AzureBlobFSSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -2087,12 +2088,12 @@ type AzureBlobFSSource struct { // GetCopySource implements the CopySourceClassification interface for type AzureBlobFSSource. func (a *AzureBlobFSSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -2120,11 +2121,11 @@ type AzureBlobFSWriteSettings struct { // GetStoreWriteSettings implements the StoreWriteSettingsClassification interface for type AzureBlobFSWriteSettings. func (a *AzureBlobFSWriteSettings) GetStoreWriteSettings() *StoreWriteSettings { return &StoreWriteSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - CopyBehavior: a.CopyBehavior, AdditionalProperties: a.AdditionalProperties, + CopyBehavior: a.CopyBehavior, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -2155,12 +2156,12 @@ type AzureBlobStorageLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureBlobStorageLinkedService. func (a *AzureBlobStorageLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -2172,7 +2173,7 @@ type AzureBlobStorageLinkedServiceTypeProperties struct { // Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose // v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with // resultType string). - AccountKind *string + AccountKind any // The type used for authentication. Type: string. AuthenticationType *AzureStorageAuthenticationType @@ -2193,7 +2194,7 @@ type AzureBlobStorageLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). + // Type: string. EncryptedCredential *string // The Azure key vault secret reference of sasToken in sas uri. @@ -2204,7 +2205,7 @@ type AzureBlobStorageLinkedServiceTypeProperties struct { SasURI any // Blob service endpoint of the Azure Blob Storage resource. 
It is mutually exclusive with connectionString, sasUri property. - ServiceEndpoint *string + ServiceEndpoint any // The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with // resultType string). @@ -2238,10 +2239,10 @@ type AzureBlobStorageLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type AzureBlobStorageLocation. func (a *AzureBlobStorageLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: a.Type, - FolderPath: a.FolderPath, - FileName: a.FileName, AdditionalProperties: a.AdditionalProperties, + FileName: a.FileName, + FolderPath: a.FolderPath, + Type: a.Type, } } @@ -2260,8 +2261,8 @@ type AzureBlobStorageReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -2296,10 +2297,10 @@ type AzureBlobStorageReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type AzureBlobStorageReadSettings. func (a *AzureBlobStorageReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -2327,11 +2328,11 @@ type AzureBlobStorageWriteSettings struct { // GetStoreWriteSettings implements the StoreWriteSettingsClassification interface for type AzureBlobStorageWriteSettings. func (a *AzureBlobStorageWriteSettings) GetStoreWriteSettings() *StoreWriteSettings { return &StoreWriteSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - CopyBehavior: a.CopyBehavior, AdditionalProperties: a.AdditionalProperties, + CopyBehavior: a.CopyBehavior, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -2358,9 +2359,16 @@ type AzureDataExplorerCommandActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -2368,26 +2376,30 @@ type AzureDataExplorerCommandActivity struct { // GetActivity implements the ActivityClassification interface for type AzureDataExplorerCommandActivity. 
func (a *AzureDataExplorerCommandActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type AzureDataExplorerCommandActivity. func (a *AzureDataExplorerCommandActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, LinkedServiceName: a.LinkedServiceName, - Policy: a.Policy, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + Policy: a.Policy, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } @@ -2434,12 +2446,12 @@ type AzureDataExplorerLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureDataExplorerLinkedService. func (a *AzureDataExplorerLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -2505,14 +2517,14 @@ type AzureDataExplorerSink struct { // GetCopySink implements the CopySinkClassification interface for type AzureDataExplorerSink. func (a *AzureDataExplorerSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -2554,12 +2566,12 @@ type AzureDataExplorerSource struct { // GetCopySource implements the CopySourceClassification interface for type AzureDataExplorerSource. func (a *AzureDataExplorerSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -2600,15 +2612,15 @@ type AzureDataExplorerTableDataset struct { // GetDataset implements the DatasetClassification interface for type AzureDataExplorerTableDataset. 
func (a *AzureDataExplorerTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -2639,12 +2651,12 @@ type AzureDataLakeAnalyticsLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureDataLakeAnalyticsLinkedService. func (a *AzureDataLakeAnalyticsLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -2661,8 +2673,8 @@ type AzureDataLakeAnalyticsLinkedServiceTypeProperties struct { DataLakeAnalyticsURI any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with // resultType string). @@ -2717,15 +2729,15 @@ type AzureDataLakeStoreDataset struct { // GetDataset implements the DatasetClassification interface for type AzureDataLakeStoreDataset. func (a *AzureDataLakeStoreDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -2771,12 +2783,12 @@ type AzureDataLakeStoreLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureDataLakeStoreLinkedService. func (a *AzureDataLakeStoreLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -2797,8 +2809,8 @@ type AzureDataLakeStoreLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with // resultType string). @@ -2837,10 +2849,10 @@ type AzureDataLakeStoreLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type AzureDataLakeStoreLocation. 
func (a *AzureDataLakeStoreLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: a.Type, - FolderPath: a.FolderPath, - FileName: a.FileName, AdditionalProperties: a.AdditionalProperties, + FileName: a.FileName, + FolderPath: a.FolderPath, + Type: a.Type, } } @@ -2859,8 +2871,8 @@ type AzureDataLakeStoreReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -2902,10 +2914,10 @@ type AzureDataLakeStoreReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type AzureDataLakeStoreReadSettings. func (a *AzureDataLakeStoreReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -2917,7 +2929,7 @@ type AzureDataLakeStoreSink struct { // OPTIONAL; Contains additional key/value pairs not defined in the schema. AdditionalProperties map[string]any - // The type of copy behavior for copy sink. + // The type of copy behavior for copy sink. Type: string (or Expression with resultType string). CopyBehavior any // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). @@ -2945,14 +2957,14 @@ type AzureDataLakeStoreSink struct { // GetCopySink implements the CopySinkClassification interface for type AzureDataLakeStoreSink. func (a *AzureDataLakeStoreSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -2984,12 +2996,12 @@ type AzureDataLakeStoreSource struct { // GetCopySource implements the CopySourceClassification interface for type AzureDataLakeStoreSource. 
func (a *AzureDataLakeStoreSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3008,8 +3020,8 @@ type AzureDataLakeStoreWriteSettings struct { DisableMetricsCollection any // Specifies the expiry time of the written files. The time is applied to the UTC time zone in the format of "2018-12-01T05:00:00Z". - // Default value is NULL. Type: integer (or Expression with resultType - // integer). + // Default value is NULL. Type: string (or Expression with resultType + // string). ExpiryDateTime any // The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). @@ -3019,11 +3031,11 @@ type AzureDataLakeStoreWriteSettings struct { // GetStoreWriteSettings implements the StoreWriteSettingsClassification interface for type AzureDataLakeStoreWriteSettings. func (a *AzureDataLakeStoreWriteSettings) GetStoreWriteSettings() *StoreWriteSettings { return &StoreWriteSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - CopyBehavior: a.CopyBehavior, AdditionalProperties: a.AdditionalProperties, + CopyBehavior: a.CopyBehavior, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -3064,15 +3076,15 @@ type AzureDatabricksDeltaLakeDataset struct { // GetDataset implements the DatasetClassification interface for type AzureDatabricksDeltaLakeDataset. func (a *AzureDatabricksDeltaLakeDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -3104,8 +3116,8 @@ type AzureDatabricksDeltaLakeExportCommand struct { // GetExportSettings implements the ExportSettingsClassification interface for type AzureDatabricksDeltaLakeExportCommand. func (a *AzureDatabricksDeltaLakeExportCommand) GetExportSettings() *ExportSettings { return &ExportSettings{ - Type: a.Type, AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3127,8 +3139,8 @@ type AzureDatabricksDeltaLakeImportCommand struct { // GetImportSettings implements the ImportSettingsClassification interface for type AzureDatabricksDeltaLakeImportCommand. func (a *AzureDatabricksDeltaLakeImportCommand) GetImportSettings() *ImportSettings { return &ImportSettings{ - Type: a.Type, AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3159,12 +3171,12 @@ type AzureDatabricksDeltaLakeLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureDatabricksDeltaLakeLinkedService. 
func (a *AzureDatabricksDeltaLakeLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3204,14 +3216,14 @@ type AzureDatabricksDeltaLakeSink struct { // GetCopySink implements the CopySinkClassification interface for type AzureDatabricksDeltaLakeSink. func (a *AzureDatabricksDeltaLakeSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -3245,12 +3257,12 @@ type AzureDatabricksDeltaLakeSource struct { // GetCopySource implements the CopySourceClassification interface for type AzureDatabricksDeltaLakeSource. func (a *AzureDatabricksDeltaLakeSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3272,8 +3284,8 @@ type AzureDatabricksDetltaLakeLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). WorkspaceResourceID any @@ -3306,12 +3318,12 @@ type AzureDatabricksLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureDatabricksLinkedService. func (a *AzureDatabricksLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3333,8 +3345,8 @@ type AzureDatabricksLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression // with resultType string). 
@@ -3421,12 +3433,12 @@ type AzureFileStorageLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureFileStorageLinkedService. func (a *AzureFileStorageLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3439,8 +3451,8 @@ type AzureFileStorageLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType // string). @@ -3484,10 +3496,10 @@ type AzureFileStorageLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type AzureFileStorageLocation. func (a *AzureFileStorageLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: a.Type, - FolderPath: a.FolderPath, - FileName: a.FileName, AdditionalProperties: a.AdditionalProperties, + FileName: a.FileName, + FolderPath: a.FolderPath, + Type: a.Type, } } @@ -3506,8 +3518,8 @@ type AzureFileStorageReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -3542,10 +3554,10 @@ type AzureFileStorageReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type AzureFileStorageReadSettings. func (a *AzureFileStorageReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -3570,11 +3582,11 @@ type AzureFileStorageWriteSettings struct { // GetStoreWriteSettings implements the StoreWriteSettingsClassification interface for type AzureFileStorageWriteSettings. func (a *AzureFileStorageWriteSettings) GetStoreWriteSettings() *StoreWriteSettings { return &StoreWriteSettings{ - Type: a.Type, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - CopyBehavior: a.CopyBehavior, AdditionalProperties: a.AdditionalProperties, + CopyBehavior: a.CopyBehavior, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + Type: a.Type, } } @@ -3601,9 +3613,16 @@ type AzureFunctionActivity struct { // Linked service reference. 
LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -3611,26 +3630,30 @@ type AzureFunctionActivity struct { // GetActivity implements the ActivityClassification interface for type AzureFunctionActivity. func (a *AzureFunctionActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type AzureFunctionActivity. func (a *AzureFunctionActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, LinkedServiceName: a.LinkedServiceName, - Policy: a.Policy, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + Policy: a.Policy, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } @@ -3680,12 +3703,12 @@ type AzureFunctionLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureFunctionLinkedService. func (a *AzureFunctionLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3702,8 +3725,8 @@ type AzureFunctionLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Function or Host key for Azure Function App. FunctionKey SecretBaseClassification @@ -3739,12 +3762,12 @@ type AzureKeyVaultLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureKeyVaultLinkedService. func (a *AzureKeyVaultLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3804,9 +3827,16 @@ type AzureMLBatchExecutionActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. 
This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -3814,26 +3844,30 @@ type AzureMLBatchExecutionActivity struct { // GetActivity implements the ActivityClassification interface for type AzureMLBatchExecutionActivity. func (a *AzureMLBatchExecutionActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type AzureMLBatchExecutionActivity. func (a *AzureMLBatchExecutionActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, LinkedServiceName: a.LinkedServiceName, - Policy: a.Policy, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + Policy: a.Policy, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } @@ -3878,9 +3912,16 @@ type AzureMLExecutePipelineActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -3888,26 +3929,30 @@ type AzureMLExecutePipelineActivity struct { // GetActivity implements the ActivityClassification interface for type AzureMLExecutePipelineActivity. func (a *AzureMLExecutePipelineActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type AzureMLExecutePipelineActivity. 
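// -------------------------------------------------------------------------
// Usage sketch (not part of the generated patch): the hunks above add the
// optional State and OnInactiveMarkAs fields to the activity models. A
// minimal example of setting them, written as if inside this package and
// assuming the enum types ActivityState / ActivityOnInactiveMarkAs are
// string-based; to.Ptr comes from sdk/azcore/to. Other required members of
// the activity are omitted for brevity.
package armdatafactory

import "github.com/Azure/azure-sdk-for-go/sdk/azcore/to"

func exampleInactiveActivity() AzureFunctionActivity {
	return AzureFunctionActivity{
		Name: to.Ptr("callFunction"),
		// Inactive activities are skipped at run time; OnInactiveMarkAs
		// controls the status they report (Succeeded when omitted).
		State:            to.Ptr(ActivityState("Inactive")),
		OnInactiveMarkAs: to.Ptr(ActivityOnInactiveMarkAs("Succeeded")),
	}
}
// -------------------------------------------------------------------------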
func (a *AzureMLExecutePipelineActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, LinkedServiceName: a.LinkedServiceName, - Policy: a.Policy, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + Policy: a.Policy, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } @@ -3976,12 +4021,12 @@ type AzureMLLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureMLLinkedService. func (a *AzureMLLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -3999,8 +4044,8 @@ type AzureMLLinkedServiceTypeProperties struct { Authentication any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio // web service. Type: string (or Expression with resultType string). @@ -4044,12 +4089,12 @@ type AzureMLServiceLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureMLServiceLinkedService. func (a *AzureMLServiceLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -4064,9 +4109,13 @@ type AzureMLServiceLinkedServiceTypeProperties struct { // REQUIRED; Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). SubscriptionID any + // Type of authentication (Required to specify MSI) used to connect to AzureML. Type: string (or Expression with resultType + // string). + Authentication any + // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: // string (or Expression with resultType string). @@ -4102,9 +4151,16 @@ type AzureMLUpdateResourceActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. 
UserProperties []*UserProperty } @@ -4112,26 +4168,30 @@ type AzureMLUpdateResourceActivity struct { // GetActivity implements the ActivityClassification interface for type AzureMLUpdateResourceActivity. func (a *AzureMLUpdateResourceActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type AzureMLUpdateResourceActivity. func (a *AzureMLUpdateResourceActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: a.AdditionalProperties, + DependsOn: a.DependsOn, + Description: a.Description, LinkedServiceName: a.LinkedServiceName, - Policy: a.Policy, Name: a.Name, + OnInactiveMarkAs: a.OnInactiveMarkAs, + Policy: a.Policy, + State: a.State, Type: a.Type, - Description: a.Description, - DependsOn: a.DependsOn, UserProperties: a.UserProperties, - AdditionalProperties: a.AdditionalProperties, } } @@ -4186,12 +4246,12 @@ type AzureMariaDBLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureMariaDBLinkedService. func (a *AzureMariaDBLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -4201,8 +4261,8 @@ type AzureMariaDBLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Pwd *AzureKeyVaultSecretReference @@ -4242,26 +4302,26 @@ type AzureMariaDBSource struct { // GetCopySource implements the CopySourceClassification interface for type AzureMariaDBSource. func (a *AzureMariaDBSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type AzureMariaDBSource. 
func (a *AzureMariaDBSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: a.QueryTimeout, AdditionalColumns: a.AdditionalColumns, - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + QueryTimeout: a.QueryTimeout, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -4302,15 +4362,15 @@ type AzureMariaDBTableDataset struct { // GetDataset implements the DatasetClassification interface for type AzureMariaDBTableDataset. func (a *AzureMariaDBTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -4341,12 +4401,12 @@ type AzureMySQLLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureMySQLLinkedService. func (a *AzureMySQLLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -4356,8 +4416,8 @@ type AzureMySQLLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password *AzureKeyVaultSecretReference @@ -4396,14 +4456,14 @@ type AzureMySQLSink struct { // GetCopySink implements the CopySinkClassification interface for type AzureMySQLSink. func (a *AzureMySQLSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -4441,26 +4501,26 @@ type AzureMySQLSource struct { // GetCopySource implements the CopySourceClassification interface for type AzureMySQLSource. 
func (a *AzureMySQLSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type AzureMySQLSource. func (a *AzureMySQLSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: a.QueryTimeout, AdditionalColumns: a.AdditionalColumns, - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + QueryTimeout: a.QueryTimeout, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -4501,15 +4561,15 @@ type AzureMySQLTableDataset struct { // GetDataset implements the DatasetClassification interface for type AzureMySQLTableDataset. func (a *AzureMySQLTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -4549,12 +4609,12 @@ type AzurePostgreSQLLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzurePostgreSQLLinkedService. func (a *AzurePostgreSQLLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -4564,8 +4624,8 @@ type AzurePostgreSQLLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password *AzureKeyVaultSecretReference @@ -4604,14 +4664,14 @@ type AzurePostgreSQLSink struct { // GetCopySink implements the CopySinkClassification interface for type AzurePostgreSQLSink. 
func (a *AzurePostgreSQLSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -4649,26 +4709,26 @@ type AzurePostgreSQLSource struct { // GetCopySource implements the CopySourceClassification interface for type AzurePostgreSQLSource. func (a *AzurePostgreSQLSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type AzurePostgreSQLSource. func (a *AzurePostgreSQLSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: a.QueryTimeout, AdditionalColumns: a.AdditionalColumns, - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + QueryTimeout: a.QueryTimeout, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -4709,15 +4769,15 @@ type AzurePostgreSQLTableDataset struct { // GetDataset implements the DatasetClassification interface for type AzurePostgreSQLTableDataset. func (a *AzurePostgreSQLTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -4764,14 +4824,14 @@ type AzureQueueSink struct { // GetCopySink implements the CopySinkClassification interface for type AzureQueueSink. 
func (a *AzureQueueSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -4802,12 +4862,12 @@ type AzureSQLDWLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureSQLDWLinkedService. func (a *AzureSQLDWLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -4826,8 +4886,8 @@ type AzureSQLDWLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password *AzureKeyVaultSecretReference @@ -4880,15 +4940,15 @@ type AzureSQLDWTableDataset struct { // GetDataset implements the DatasetClassification interface for type AzureSQLDWTableDataset. func (a *AzureSQLDWTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -4931,12 +4991,12 @@ type AzureSQLDatabaseLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureSQLDatabaseLinkedService. func (a *AzureSQLDatabaseLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -4957,8 +5017,8 @@ type AzureSQLDatabaseLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password *AzureKeyVaultSecretReference @@ -5001,12 +5061,12 @@ type AzureSQLMILinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureSQLMILinkedService. 
func (a *AzureSQLMILinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -5027,8 +5087,8 @@ type AzureSQLMILinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password *AzureKeyVaultSecretReference @@ -5081,15 +5141,15 @@ type AzureSQLMITableDataset struct { // GetDataset implements the DatasetClassification interface for type AzureSQLMITableDataset. func (a *AzureSQLMITableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -5163,14 +5223,14 @@ type AzureSQLSink struct { // GetCopySink implements the CopySinkClassification interface for type AzureSQLSink. func (a *AzureSQLSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -5189,6 +5249,11 @@ type AzureSQLSource struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any + // Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. + // The default value is ReadCommitted. Type: string (or + // Expression with resultType string). + IsolationLevel any + // The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). MaxConcurrentConnections any @@ -5225,26 +5290,26 @@ type AzureSQLSource struct { // GetCopySource implements the CopySourceClassification interface for type AzureSQLSource. 
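// -------------------------------------------------------------------------
// Usage sketch (not part of the generated patch): two API changes visible in
// the hunks above are (1) EncryptedCredential moving from any to *string on
// the linked-service type-properties models and (2) the new IsolationLevel
// expression on AzureSQLSource. A minimal illustration, written as if inside
// this package; to.Ptr comes from sdk/azcore/to and the literal values are
// placeholders only.
package armdatafactory

import "github.com/Azure/azure-sdk-for-go/sdk/azcore/to"

func exampleAzureSQLModels() (AzureSQLDatabaseLinkedServiceTypeProperties, AzureSQLSource) {
	props := AzureSQLDatabaseLinkedServiceTypeProperties{
		// Previously any; now a plain *string holding the value the service returns.
		EncryptedCredential: to.Ptr("<encrypted-credential>"),
	}
	src := AzureSQLSource{
		// New any-typed field: a string or an Expression with resultType string.
		IsolationLevel: "ReadCommitted",
	}
	return props, src
}
// -------------------------------------------------------------------------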
func (a *AzureSQLSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type AzureSQLSource. func (a *AzureSQLSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: a.QueryTimeout, AdditionalColumns: a.AdditionalColumns, - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + QueryTimeout: a.QueryTimeout, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -5285,15 +5350,15 @@ type AzureSQLTableDataset struct { // GetDataset implements the DatasetClassification interface for type AzureSQLTableDataset. func (a *AzureSQLTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -5346,15 +5411,15 @@ type AzureSearchIndexDataset struct { // GetDataset implements the DatasetClassification interface for type AzureSearchIndexDataset. func (a *AzureSearchIndexDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -5397,14 +5462,14 @@ type AzureSearchIndexSink struct { // GetCopySink implements the CopySinkClassification interface for type AzureSearchIndexSink. func (a *AzureSearchIndexSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -5435,12 +5500,12 @@ type AzureSearchLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureSearchLinkedService. 
func (a *AzureSearchLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -5450,8 +5515,8 @@ type AzureSearchLinkedServiceTypeProperties struct { URL any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Admin Key for Azure Search service Key SecretBaseClassification @@ -5484,12 +5549,12 @@ type AzureStorageLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureStorageLinkedService. func (a *AzureStorageLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -5502,7 +5567,7 @@ type AzureStorageLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). + // Type: string. EncryptedCredential *string // The Azure key vault secret reference of sasToken in sas uri. @@ -5540,12 +5605,12 @@ type AzureSynapseArtifactsLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureSynapseArtifactsLinkedService. func (a *AzureSynapseArtifactsLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -5602,15 +5667,15 @@ type AzureTableDataset struct { // GetDataset implements the DatasetClassification interface for type AzureTableDataset. func (a *AzureTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, Description: a.Description, - Structure: a.Structure, - Schema: a.Schema, + Folder: a.Folder, LinkedServiceName: a.LinkedServiceName, Parameters: a.Parameters, - Annotations: a.Annotations, - Folder: a.Folder, - AdditionalProperties: a.AdditionalProperties, + Schema: a.Schema, + Structure: a.Structure, + Type: a.Type, } } @@ -5662,14 +5727,14 @@ type AzureTableSink struct { // GetCopySink implements the CopySinkClassification interface for type AzureTableSink. 
func (a *AzureTableSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + SinkRetryCount: a.SinkRetryCount, + SinkRetryWait: a.SinkRetryWait, Type: a.Type, WriteBatchSize: a.WriteBatchSize, WriteBatchTimeout: a.WriteBatchTimeout, - SinkRetryCount: a.SinkRetryCount, - SinkRetryWait: a.SinkRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, } } @@ -5710,26 +5775,26 @@ type AzureTableSource struct { // GetCopySource implements the CopySourceClassification interface for type AzureTableSource. func (a *AzureTableSource) GetCopySource() *CopySource { return &CopySource{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type AzureTableSource. func (a *AzureTableSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: a.QueryTimeout, AdditionalColumns: a.AdditionalColumns, - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + DisableMetricsCollection: a.DisableMetricsCollection, + MaxConcurrentConnections: a.MaxConcurrentConnections, + QueryTimeout: a.QueryTimeout, SourceRetryCount: a.SourceRetryCount, SourceRetryWait: a.SourceRetryWait, - MaxConcurrentConnections: a.MaxConcurrentConnections, - DisableMetricsCollection: a.DisableMetricsCollection, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -5760,12 +5825,12 @@ type AzureTableStorageLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type AzureTableStorageLinkedService. func (a *AzureTableStorageLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: a.Type, + AdditionalProperties: a.AdditionalProperties, + Annotations: a.Annotations, ConnectVia: a.ConnectVia, Description: a.Description, Parameters: a.Parameters, - Annotations: a.Annotations, - AdditionalProperties: a.AdditionalProperties, + Type: a.Type, } } @@ -5815,15 +5880,15 @@ type BinaryDataset struct { // GetDataset implements the DatasetClassification interface for type BinaryDataset. func (b *BinaryDataset) GetDataset() *Dataset { return &Dataset{ - Type: b.Type, + AdditionalProperties: b.AdditionalProperties, + Annotations: b.Annotations, Description: b.Description, - Structure: b.Structure, - Schema: b.Schema, + Folder: b.Folder, LinkedServiceName: b.LinkedServiceName, Parameters: b.Parameters, - Annotations: b.Annotations, - Folder: b.Folder, - AdditionalProperties: b.AdditionalProperties, + Schema: b.Schema, + Structure: b.Structure, + Type: b.Type, } } @@ -5851,8 +5916,8 @@ type BinaryReadSettings struct { // GetFormatReadSettings implements the FormatReadSettingsClassification interface for type BinaryReadSettings. 
func (b *BinaryReadSettings) GetFormatReadSettings() *FormatReadSettings { return &FormatReadSettings{ - Type: b.Type, AdditionalProperties: b.AdditionalProperties, + Type: b.Type, } } @@ -5889,14 +5954,14 @@ type BinarySink struct { // GetCopySink implements the CopySinkClassification interface for type BinarySink. func (b *BinarySink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: b.AdditionalProperties, + DisableMetricsCollection: b.DisableMetricsCollection, + MaxConcurrentConnections: b.MaxConcurrentConnections, + SinkRetryCount: b.SinkRetryCount, + SinkRetryWait: b.SinkRetryWait, Type: b.Type, WriteBatchSize: b.WriteBatchSize, WriteBatchTimeout: b.WriteBatchTimeout, - SinkRetryCount: b.SinkRetryCount, - SinkRetryWait: b.SinkRetryWait, - MaxConcurrentConnections: b.MaxConcurrentConnections, - DisableMetricsCollection: b.DisableMetricsCollection, - AdditionalProperties: b.AdditionalProperties, } } @@ -5930,12 +5995,12 @@ type BinarySource struct { // GetCopySource implements the CopySourceClassification interface for type BinarySource. func (b *BinarySource) GetCopySource() *CopySource { return &CopySource{ - Type: b.Type, + AdditionalProperties: b.AdditionalProperties, + DisableMetricsCollection: b.DisableMetricsCollection, + MaxConcurrentConnections: b.MaxConcurrentConnections, SourceRetryCount: b.SourceRetryCount, SourceRetryWait: b.SourceRetryWait, - MaxConcurrentConnections: b.MaxConcurrentConnections, - DisableMetricsCollection: b.DisableMetricsCollection, - AdditionalProperties: b.AdditionalProperties, + Type: b.Type, } } @@ -5966,23 +6031,23 @@ type BlobEventsTrigger struct { // GetMultiplePipelineTrigger implements the MultiplePipelineTriggerClassification interface for type BlobEventsTrigger. func (b *BlobEventsTrigger) GetMultiplePipelineTrigger() *MultiplePipelineTrigger { return &MultiplePipelineTrigger{ - Pipelines: b.Pipelines, - Type: b.Type, + AdditionalProperties: b.AdditionalProperties, + Annotations: b.Annotations, Description: b.Description, + Pipelines: b.Pipelines, RuntimeState: b.RuntimeState, - Annotations: b.Annotations, - AdditionalProperties: b.AdditionalProperties, + Type: b.Type, } } // GetTrigger implements the TriggerClassification interface for type BlobEventsTrigger. func (b *BlobEventsTrigger) GetTrigger() *Trigger { return &Trigger{ - Type: b.Type, + AdditionalProperties: b.AdditionalProperties, + Annotations: b.Annotations, Description: b.Description, RuntimeState: b.RuntimeState, - Annotations: b.Annotations, - AdditionalProperties: b.AdditionalProperties, + Type: b.Type, } } @@ -6053,14 +6118,14 @@ type BlobSink struct { // GetCopySink implements the CopySinkClassification interface for type BlobSink. func (b *BlobSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: b.AdditionalProperties, + DisableMetricsCollection: b.DisableMetricsCollection, + MaxConcurrentConnections: b.MaxConcurrentConnections, + SinkRetryCount: b.SinkRetryCount, + SinkRetryWait: b.SinkRetryWait, Type: b.Type, WriteBatchSize: b.WriteBatchSize, WriteBatchTimeout: b.WriteBatchTimeout, - SinkRetryCount: b.SinkRetryCount, - SinkRetryWait: b.SinkRetryWait, - MaxConcurrentConnections: b.MaxConcurrentConnections, - DisableMetricsCollection: b.DisableMetricsCollection, - AdditionalProperties: b.AdditionalProperties, } } @@ -6098,12 +6163,12 @@ type BlobSource struct { // GetCopySource implements the CopySourceClassification interface for type BlobSource. 
func (b *BlobSource) GetCopySource() *CopySource { return &CopySource{ - Type: b.Type, + AdditionalProperties: b.AdditionalProperties, + DisableMetricsCollection: b.DisableMetricsCollection, + MaxConcurrentConnections: b.MaxConcurrentConnections, SourceRetryCount: b.SourceRetryCount, SourceRetryWait: b.SourceRetryWait, - MaxConcurrentConnections: b.MaxConcurrentConnections, - DisableMetricsCollection: b.DisableMetricsCollection, - AdditionalProperties: b.AdditionalProperties, + Type: b.Type, } } @@ -6134,23 +6199,23 @@ type BlobTrigger struct { // GetMultiplePipelineTrigger implements the MultiplePipelineTriggerClassification interface for type BlobTrigger. func (b *BlobTrigger) GetMultiplePipelineTrigger() *MultiplePipelineTrigger { return &MultiplePipelineTrigger{ - Pipelines: b.Pipelines, - Type: b.Type, + AdditionalProperties: b.AdditionalProperties, + Annotations: b.Annotations, Description: b.Description, + Pipelines: b.Pipelines, RuntimeState: b.RuntimeState, - Annotations: b.Annotations, - AdditionalProperties: b.AdditionalProperties, + Type: b.Type, } } // GetTrigger implements the TriggerClassification interface for type BlobTrigger. func (b *BlobTrigger) GetTrigger() *Trigger { return &Trigger{ - Type: b.Type, + AdditionalProperties: b.AdditionalProperties, + Annotations: b.Annotations, Description: b.Description, RuntimeState: b.RuntimeState, - Annotations: b.Annotations, - AdditionalProperties: b.AdditionalProperties, + Type: b.Type, } } @@ -6199,12 +6264,12 @@ type CassandraLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type CassandraLinkedService. func (c *CassandraLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, ConnectVia: c.ConnectVia, Description: c.Description, Parameters: c.Parameters, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -6217,8 +6282,8 @@ type CassandraLinkedServiceTypeProperties struct { AuthenticationType any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password for authentication. Password SecretBaseClassification @@ -6271,26 +6336,26 @@ type CassandraSource struct { // GetCopySource implements the CopySourceClassification interface for type CassandraSource. func (c *CassandraSource) GetCopySource() *CopySource { return &CopySource{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, SourceRetryCount: c.SourceRetryCount, SourceRetryWait: c.SourceRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type CassandraSource. 
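// -------------------------------------------------------------------------
// Usage sketch (not part of the generated patch): every copy source in the
// hunks above exposes its shared fields through GetCopySource, and the
// tabular sources additionally satisfy TabularSourceClassification. A small
// illustration of consuming the polymorphic types, written as if inside this
// package.
package armdatafactory

// isTabular reports whether a polymorphic copy source is one of the tabular
// sources (CassandraSource, AzureSQLSource, ...).
func isTabular(src CopySourceClassification) bool {
	// Shared fields are reachable through the base accessor.
	_ = src.GetCopySource().SourceRetryCount
	// An interface type assertion narrows to the tabular subset.
	_, ok := src.(TabularSourceClassification)
	return ok
}

// For example, isTabular(&CassandraSource{}) is true and isTabular(&BlobSource{}) is false.
// -------------------------------------------------------------------------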
func (c *CassandraSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: c.QueryTimeout, AdditionalColumns: c.AdditionalColumns, - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, + QueryTimeout: c.QueryTimeout, SourceRetryCount: c.SourceRetryCount, SourceRetryWait: c.SourceRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -6331,15 +6396,15 @@ type CassandraTableDataset struct { // GetDataset implements the DatasetClassification interface for type CassandraTableDataset. func (c *CassandraTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, - Structure: c.Structure, - Schema: c.Schema, + Folder: c.Folder, LinkedServiceName: c.LinkedServiceName, Parameters: c.Parameters, - Annotations: c.Annotations, - Folder: c.Folder, - AdditionalProperties: c.AdditionalProperties, + Schema: c.Schema, + Structure: c.Structure, + Type: c.Type, } } @@ -6382,11 +6447,11 @@ type ChainingTrigger struct { // GetTrigger implements the TriggerClassification interface for type ChainingTrigger. func (c *ChainingTrigger) GetTrigger() *Trigger { return &Trigger{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, RuntimeState: c.RuntimeState, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -6399,6 +6464,67 @@ type ChainingTriggerTypeProperties struct { RunDimension *string } +// ChangeDataCapture - A Azure Data Factory object which automatically detects data changes at the source and then sends the +// updated data to the destination. +type ChangeDataCapture struct { + // REQUIRED; CDC policy + Policy *MapperPolicy + + // REQUIRED; List of sources connections that can be used as sources in the CDC. + SourceConnectionsInfo []*MapperSourceConnectionsInfo + + // REQUIRED; List of target connections that can be used as sources in the CDC. + TargetConnectionsInfo []*MapperTargetConnectionsInfo + + // A boolean to determine if the vnet configuration needs to be overwritten. + AllowVNetOverride *bool + + // The description of the change data capture. + Description *string + + // The folder that this CDC is in. If not specified, CDC will appear at the root level. + Folder *ChangeDataCaptureFolder + + // Status of the CDC as to if it is running or stopped. + Status *string +} + +// ChangeDataCaptureFolder - The folder that this CDC is in. If not specified, CDC will appear at the root level. +type ChangeDataCaptureFolder struct { + // The name of the folder that this CDC is in. + Name *string +} + +// ChangeDataCaptureListResponse - A list of change data capture resources. +type ChangeDataCaptureListResponse struct { + // REQUIRED; Lists all resources of type change data capture. + Value []*ChangeDataCaptureResource + + // The link to the next page of results, if any remaining results exist. + NextLink *string +} + +// ChangeDataCaptureResource - Change data capture resource type. +type ChangeDataCaptureResource struct { + // REQUIRED; Properties of the change data capture. + Properties *ChangeDataCapture + + // OPTIONAL; Contains additional key/value pairs not defined in the schema. 
+ AdditionalProperties map[string]any + + // READ-ONLY; Etag identifies change in the resource. + Etag *string + + // READ-ONLY; The resource identifier. + ID *string + + // READ-ONLY; The resource name. + Name *string + + // READ-ONLY; The resource type. + Type *string +} + // CmdkeySetup - The custom setup of running cmdkey commands. type CmdkeySetup struct { // REQUIRED; The type of custom setup. @@ -6464,15 +6590,15 @@ type CommonDataServiceForAppsEntityDataset struct { // GetDataset implements the DatasetClassification interface for type CommonDataServiceForAppsEntityDataset. func (c *CommonDataServiceForAppsEntityDataset) GetDataset() *Dataset { return &Dataset{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, - Structure: c.Structure, - Schema: c.Schema, + Folder: c.Folder, LinkedServiceName: c.LinkedServiceName, Parameters: c.Parameters, - Annotations: c.Annotations, - Folder: c.Folder, - AdditionalProperties: c.AdditionalProperties, + Schema: c.Schema, + Structure: c.Structure, + Type: c.Type, } } @@ -6509,12 +6635,12 @@ type CommonDataServiceForAppsLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type CommonDataServiceForAppsLinkedService. func (c *CommonDataServiceForAppsLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, ConnectVia: c.ConnectVia, Description: c.Description, Parameters: c.Parameters, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -6531,8 +6657,8 @@ type CommonDataServiceForAppsLinkedServiceTypeProperties struct { DeploymentType any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed // for online. Type: string (or Expression with resultType string). @@ -6615,14 +6741,14 @@ type CommonDataServiceForAppsSink struct { // GetCopySink implements the CopySinkClassification interface for type CommonDataServiceForAppsSink. func (c *CommonDataServiceForAppsSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, + SinkRetryCount: c.SinkRetryCount, + SinkRetryWait: c.SinkRetryWait, Type: c.Type, WriteBatchSize: c.WriteBatchSize, WriteBatchTimeout: c.WriteBatchTimeout, - SinkRetryCount: c.SinkRetryCount, - SinkRetryWait: c.SinkRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, } } @@ -6658,12 +6784,12 @@ type CommonDataServiceForAppsSource struct { // GetCopySource implements the CopySourceClassification interface for type CommonDataServiceForAppsSource. 
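// -------------------------------------------------------------------------
// Usage sketch (not part of the generated patch): a minimal construction of
// the new ChangeDataCaptureResource model introduced in the hunks above,
// written as if inside this package. MapperPolicy and the
// Mapper*ConnectionsInfo models are defined elsewhere in the package; the
// empty values below are placeholders, not a working source-to-target
// mapping. to.Ptr comes from sdk/azcore/to.
package armdatafactory

import "github.com/Azure/azure-sdk-for-go/sdk/azcore/to"

func exampleChangeDataCapture() ChangeDataCaptureResource {
	return ChangeDataCaptureResource{
		Properties: &ChangeDataCapture{
			Description:       to.Ptr("copy changed rows to the destination"),
			AllowVNetOverride: to.Ptr(false),
			Folder:            &ChangeDataCaptureFolder{Name: to.Ptr("cdc")},
			// REQUIRED members; populate from real mapper definitions.
			Policy:                &MapperPolicy{},
			SourceConnectionsInfo: []*MapperSourceConnectionsInfo{},
			TargetConnectionsInfo: []*MapperTargetConnectionsInfo{},
		},
	}
}
// -------------------------------------------------------------------------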
func (c *CommonDataServiceForAppsSource) GetCopySource() *CopySource { return &CopySource{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, SourceRetryCount: c.SourceRetryCount, SourceRetryWait: c.SourceRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -6683,15 +6809,6 @@ func (c *ComponentSetup) GetCustomSetupBase() *CustomSetupBase { } } -// CompressionReadSettingsClassification provides polymorphic access to related types. -// Call the interface's GetCompressionReadSettings() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *CompressionReadSettings, *TarGZipReadSettings, *TarReadSettings, *ZipDeflateReadSettings -type CompressionReadSettingsClassification interface { - // GetCompressionReadSettings returns the CompressionReadSettings content of the underlying type. - GetCompressionReadSettings() *CompressionReadSettings -} - // CompressionReadSettings - Compression read settings. type CompressionReadSettings struct { // REQUIRED; The Compression setting type. @@ -6731,12 +6848,12 @@ type ConcurLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type ConcurLinkedService. func (c *ConcurLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, ConnectVia: c.ConnectVia, Description: c.Description, Parameters: c.Parameters, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -6752,8 +6869,8 @@ type ConcurLinkedServiceTypeProperties struct { ConnectionProperties any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password corresponding to the user name that you provided in the username field. Password SecretBaseClassification @@ -6806,15 +6923,15 @@ type ConcurObjectDataset struct { // GetDataset implements the DatasetClassification interface for type ConcurObjectDataset. func (c *ConcurObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, - Structure: c.Structure, - Schema: c.Schema, + Folder: c.Folder, LinkedServiceName: c.LinkedServiceName, Parameters: c.Parameters, - Annotations: c.Annotations, - Folder: c.Folder, - AdditionalProperties: c.AdditionalProperties, + Schema: c.Schema, + Structure: c.Structure, + Type: c.Type, } } @@ -6852,26 +6969,26 @@ type ConcurSource struct { // GetCopySource implements the CopySourceClassification interface for type ConcurSource. 
func (c *ConcurSource) GetCopySource() *CopySource { return &CopySource{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, SourceRetryCount: c.SourceRetryCount, SourceRetryWait: c.SourceRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type ConcurSource. func (c *ConcurSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: c.QueryTimeout, AdditionalColumns: c.AdditionalColumns, - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, + QueryTimeout: c.QueryTimeout, SourceRetryCount: c.SourceRetryCount, SourceRetryWait: c.SourceRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -6887,17 +7004,6 @@ type ConnectionStateProperties struct { Status *string } -// ControlActivityClassification provides polymorphic access to related types. -// Call the interface's GetControlActivity() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AppendVariableActivity, *ControlActivity, *ExecutePipelineActivity, *FailActivity, *FilterActivity, *ForEachActivity, -// - *IfConditionActivity, *SetVariableActivity, *SwitchActivity, *UntilActivity, *ValidationActivity, *WaitActivity, *WebHookActivity -type ControlActivityClassification interface { - ActivityClassification - // GetControlActivity returns the ControlActivity content of the underlying type. - GetControlActivity() *ControlActivity -} - // ControlActivity - Base class for all control activities like IfCondition, ForEach , Until. type ControlActivity struct { // REQUIRED; Activity name. @@ -6915,6 +7021,13 @@ type ControlActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -6922,12 +7035,14 @@ type ControlActivity struct { // GetActivity implements the ActivityClassification interface for type ControlActivity. func (c *ControlActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: c.AdditionalProperties, + DependsOn: c.DependsOn, + Description: c.Description, Name: c.Name, + OnInactiveMarkAs: c.OnInactiveMarkAs, + State: c.State, Type: c.Type, - Description: c.Description, - DependsOn: c.DependsOn, UserProperties: c.UserProperties, - AdditionalProperties: c.AdditionalProperties, } } @@ -6960,12 +7075,19 @@ type CopyActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. 
+ // the activity is inactive, the status will be Succeeded by default.
+ OnInactiveMarkAs *ActivityOnInactiveMarkAs + // List of outputs for the activity. Outputs []*DatasetReference // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -6973,26 +7095,30 @@ type CopyActivity struct { // GetActivity implements the ActivityClassification interface for type CopyActivity. func (c *CopyActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: c.AdditionalProperties, + DependsOn: c.DependsOn, + Description: c.Description, Name: c.Name, + OnInactiveMarkAs: c.OnInactiveMarkAs, + State: c.State, Type: c.Type, - Description: c.Description, - DependsOn: c.DependsOn, UserProperties: c.UserProperties, - AdditionalProperties: c.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type CopyActivity. func (c *CopyActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: c.AdditionalProperties, + DependsOn: c.DependsOn, + Description: c.Description, LinkedServiceName: c.LinkedServiceName, - Policy: c.Policy, Name: c.Name, + OnInactiveMarkAs: c.OnInactiveMarkAs, + Policy: c.Policy, + State: c.State, Type: c.Type, - Description: c.Description, - DependsOn: c.DependsOn, UserProperties: c.UserProperties, - AdditionalProperties: c.AdditionalProperties, } } @@ -7068,20 +7194,6 @@ type CopyComputeScaleProperties struct { TimeToLive *int32 } -// CopySinkClassification provides polymorphic access to related types. -// Call the interface's GetCopySink() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AvroSink, *AzureBlobFSSink, *AzureDataExplorerSink, *AzureDataLakeStoreSink, *AzureDatabricksDeltaLakeSink, *AzureMySQLSink, -// - *AzurePostgreSQLSink, *AzureQueueSink, *AzureSQLSink, *AzureSearchIndexSink, *AzureTableSink, *BinarySink, *BlobSink, -// - *CommonDataServiceForAppsSink, *CopySink, *CosmosDbMongoDbAPISink, *CosmosDbSQLAPISink, *DelimitedTextSink, *DocumentDbCollectionSink, -// - *DynamicsCrmSink, *DynamicsSink, *FileSystemSink, *InformixSink, *JSONSink, *MicrosoftAccessSink, *MongoDbAtlasSink, -// - *MongoDbV2Sink, *OdbcSink, *OracleSink, *OrcSink, *ParquetSink, *RestSink, *SQLDWSink, *SQLMISink, *SQLServerSink, *SQLSink, -// - *SalesforceServiceCloudSink, *SalesforceSink, *SapCloudForCustomerSink, *SnowflakeSink -type CopySinkClassification interface { - // GetCopySink returns the CopySink content of the underlying type. - GetCopySink() *CopySink -} - // CopySink - A copy activity sink. type CopySink struct { // REQUIRED; Copy sink type. @@ -7112,28 +7224,6 @@ type CopySink struct { // GetCopySink implements the CopySinkClassification interface for type CopySink. func (c *CopySink) GetCopySink() *CopySink { return c } -// CopySourceClassification provides polymorphic access to related types. -// Call the interface's GetCopySource() method to access the common type. -// Use a type switch to determine the concrete type. 
The possible types are: -// - *AmazonMWSSource, *AmazonRdsForOracleSource, *AmazonRdsForSQLServerSource, *AmazonRedshiftSource, *AvroSource, *AzureBlobFSSource, -// - *AzureDataExplorerSource, *AzureDataLakeStoreSource, *AzureDatabricksDeltaLakeSource, *AzureMariaDBSource, *AzureMySQLSource, -// - *AzurePostgreSQLSource, *AzureSQLSource, *AzureTableSource, *BinarySource, *BlobSource, *CassandraSource, *CommonDataServiceForAppsSource, -// - *ConcurSource, *CopySource, *CosmosDbMongoDbAPISource, *CosmosDbSQLAPISource, *CouchbaseSource, *Db2Source, *DelimitedTextSource, -// - *DocumentDbCollectionSource, *DrillSource, *DynamicsAXSource, *DynamicsCrmSource, *DynamicsSource, *EloquaSource, *ExcelSource, -// - *FileSystemSource, *GoogleAdWordsSource, *GoogleBigQuerySource, *GreenplumSource, *HBaseSource, *HTTPSource, *HdfsSource, -// - *HiveSource, *HubspotSource, *ImpalaSource, *InformixSource, *JSONSource, *JiraSource, *MagentoSource, *MariaDBSource, -// - *MarketoSource, *MicrosoftAccessSource, *MongoDbAtlasSource, *MongoDbSource, *MongoDbV2Source, *MySQLSource, *NetezzaSource, -// - *ODataSource, *OdbcSource, *Office365Source, *OracleServiceCloudSource, *OracleSource, *OrcSource, *ParquetSource, *PaypalSource, -// - *PhoenixSource, *PostgreSQLSource, *PrestoSource, *QuickBooksSource, *RelationalSource, *ResponsysSource, *RestSource, -// - *SQLDWSource, *SQLMISource, *SQLServerSource, *SQLSource, *SalesforceMarketingCloudSource, *SalesforceServiceCloudSource, -// - *SalesforceSource, *SapBwSource, *SapCloudForCustomerSource, *SapEccSource, *SapHanaSource, *SapOdpSource, *SapOpenHubSource, -// - *SapTableSource, *ServiceNowSource, *SharePointOnlineListSource, *ShopifySource, *SnowflakeSource, *SparkSource, *SquareSource, -// - *SybaseSource, *TabularSource, *TeradataSource, *VerticaSource, *WebSource, *XMLSource, *XeroSource, *ZohoSource -type CopySourceClassification interface { - // GetCopySource returns the CopySource content of the underlying type. - GetCopySource() *CopySource -} - // CopySource - A copy activity source. type CopySource struct { // REQUIRED; Copy source type. @@ -7185,12 +7275,12 @@ type CosmosDbLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type CosmosDbLinkedService. func (c *CosmosDbLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, ConnectVia: c.ConnectVia, Description: c.Description, Parameters: c.Parameters, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -7207,7 +7297,7 @@ type CosmosDbLinkedServiceTypeProperties struct { // string (or Expression with resultType string). AzureCloudType any - // The connection mode used to access CosmosDB account. Type: string (or Expression with resultType string). + // The connection mode used to access CosmosDB account. Type: string. ConnectionMode *CosmosDbConnectionMode // The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. @@ -7220,8 +7310,8 @@ type CosmosDbLinkedServiceTypeProperties struct { Database any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The credential of the service principal object in Azure Active Directory. 
If servicePrincipalCredentialType is 'ServicePrincipalKey', // servicePrincipalCredential can be SecureString or @@ -7230,9 +7320,8 @@ type CosmosDbLinkedServiceTypeProperties struct { ServicePrincipalCredential SecretBaseClassification // The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, - // 'ServicePrincipalCert' for certificate. Type: string (or Expression with - // resultType string). - ServicePrincipalCredentialType *CosmosDbServicePrincipalCredentialType + // 'ServicePrincipalCert' for certificate. Type: string. + ServicePrincipalCredentialType any // The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression // with resultType string). @@ -7279,15 +7368,15 @@ type CosmosDbMongoDbAPICollectionDataset struct { // GetDataset implements the DatasetClassification interface for type CosmosDbMongoDbAPICollectionDataset. func (c *CosmosDbMongoDbAPICollectionDataset) GetDataset() *Dataset { return &Dataset{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, - Structure: c.Structure, - Schema: c.Schema, + Folder: c.Folder, LinkedServiceName: c.LinkedServiceName, Parameters: c.Parameters, - Annotations: c.Annotations, - Folder: c.Folder, - AdditionalProperties: c.AdditionalProperties, + Schema: c.Schema, + Structure: c.Structure, + Type: c.Type, } } @@ -7324,12 +7413,12 @@ type CosmosDbMongoDbAPILinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type CosmosDbMongoDbAPILinkedService. func (c *CosmosDbMongoDbAPILinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, ConnectVia: c.ConnectVia, Description: c.Description, Parameters: c.Parameters, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -7383,14 +7472,14 @@ type CosmosDbMongoDbAPISink struct { // GetCopySink implements the CopySinkClassification interface for type CosmosDbMongoDbAPISink. func (c *CosmosDbMongoDbAPISink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, + SinkRetryCount: c.SinkRetryCount, + SinkRetryWait: c.SinkRetryWait, Type: c.Type, WriteBatchSize: c.WriteBatchSize, WriteBatchTimeout: c.WriteBatchTimeout, - SinkRetryCount: c.SinkRetryCount, - SinkRetryWait: c.SinkRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, } } @@ -7437,12 +7526,12 @@ type CosmosDbMongoDbAPISource struct { // GetCopySource implements the CopySourceClassification interface for type CosmosDbMongoDbAPISource. 
func (c *CosmosDbMongoDbAPISource) GetCopySource() *CopySource { return &CopySource{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, SourceRetryCount: c.SourceRetryCount, SourceRetryWait: c.SourceRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -7483,15 +7572,15 @@ type CosmosDbSQLAPICollectionDataset struct { // GetDataset implements the DatasetClassification interface for type CosmosDbSQLAPICollectionDataset. func (c *CosmosDbSQLAPICollectionDataset) GetDataset() *Dataset { return &Dataset{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, - Structure: c.Structure, - Schema: c.Schema, + Folder: c.Folder, LinkedServiceName: c.LinkedServiceName, Parameters: c.Parameters, - Annotations: c.Annotations, - Folder: c.Folder, - AdditionalProperties: c.AdditionalProperties, + Schema: c.Schema, + Structure: c.Structure, + Type: c.Type, } } @@ -7535,14 +7624,14 @@ type CosmosDbSQLAPISink struct { // GetCopySink implements the CopySinkClassification interface for type CosmosDbSQLAPISink. func (c *CosmosDbSQLAPISink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, + SinkRetryCount: c.SinkRetryCount, + SinkRetryWait: c.SinkRetryWait, Type: c.Type, WriteBatchSize: c.WriteBatchSize, WriteBatchTimeout: c.WriteBatchTimeout, - SinkRetryCount: c.SinkRetryCount, - SinkRetryWait: c.SinkRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, } } @@ -7586,12 +7675,12 @@ type CosmosDbSQLAPISource struct { // GetCopySource implements the CopySourceClassification interface for type CosmosDbSQLAPISource. func (c *CosmosDbSQLAPISource) GetCopySource() *CopySource { return &CopySource{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, SourceRetryCount: c.SourceRetryCount, SourceRetryWait: c.SourceRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -7622,12 +7711,12 @@ type CouchbaseLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type CouchbaseLinkedService. func (c *CouchbaseLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, ConnectVia: c.ConnectVia, Description: c.Description, Parameters: c.Parameters, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -7640,8 +7729,8 @@ type CouchbaseLinkedServiceTypeProperties struct { CredString *AzureKeyVaultSecretReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. 
+ EncryptedCredential *string } // CouchbaseSource - A copy activity Couchbase server source. @@ -7678,26 +7767,26 @@ type CouchbaseSource struct { // GetCopySource implements the CopySourceClassification interface for type CouchbaseSource. func (c *CouchbaseSource) GetCopySource() *CopySource { return &CopySource{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, SourceRetryCount: c.SourceRetryCount, SourceRetryWait: c.SourceRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type CouchbaseSource. func (c *CouchbaseSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: c.QueryTimeout, AdditionalColumns: c.AdditionalColumns, - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + DisableMetricsCollection: c.DisableMetricsCollection, + MaxConcurrentConnections: c.MaxConcurrentConnections, + QueryTimeout: c.QueryTimeout, SourceRetryCount: c.SourceRetryCount, SourceRetryWait: c.SourceRetryWait, - MaxConcurrentConnections: c.MaxConcurrentConnections, - DisableMetricsCollection: c.DisableMetricsCollection, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -7738,15 +7827,15 @@ type CouchbaseTableDataset struct { // GetDataset implements the DatasetClassification interface for type CouchbaseTableDataset. func (c *CouchbaseTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, - Structure: c.Structure, - Schema: c.Schema, + Folder: c.Folder, LinkedServiceName: c.LinkedServiceName, Parameters: c.Parameters, - Annotations: c.Annotations, - Folder: c.Folder, - AdditionalProperties: c.AdditionalProperties, + Schema: c.Schema, + Structure: c.Structure, + Type: c.Type, } } @@ -7795,15 +7884,6 @@ type CreateRunResponse struct { RunID *string } -// CredentialClassification provides polymorphic access to related types. -// Call the interface's GetCredential() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *Credential, *ManagedIdentityCredential, *ServicePrincipalCredential -type CredentialClassification interface { - // GetCredential returns the Credential content of the underlying type. - GetCredential() *Credential -} - // Credential - The Azure Data Factory nested object which contains the information and credential which can be used to connect // with related store or compute resource. type Credential struct { @@ -7832,32 +7912,6 @@ type CredentialListResponse struct { NextLink *string } -// CredentialOperationsClientCreateOrUpdateOptions contains the optional parameters for the CredentialOperationsClient.CreateOrUpdate -// method. -type CredentialOperationsClientCreateOrUpdateOptions struct { - // ETag of the credential entity. Should only be specified for update, for which it should match existing entity or can be - // * for unconditional update. - IfMatch *string -} - -// CredentialOperationsClientDeleteOptions contains the optional parameters for the CredentialOperationsClient.Delete method. 
-type CredentialOperationsClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// CredentialOperationsClientGetOptions contains the optional parameters for the CredentialOperationsClient.Get method. -type CredentialOperationsClientGetOptions struct { - // ETag of the credential entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was - // provided, then no content will be returned. - IfNoneMatch *string -} - -// CredentialOperationsClientListByFactoryOptions contains the optional parameters for the CredentialOperationsClient.NewListByFactoryPager -// method. -type CredentialOperationsClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - // CredentialReference - Credential reference type. type CredentialReference struct { // REQUIRED; Reference credential name. @@ -7893,9 +7947,16 @@ type CustomActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -7903,26 +7964,30 @@ type CustomActivity struct { // GetActivity implements the ActivityClassification interface for type CustomActivity. func (c *CustomActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: c.AdditionalProperties, + DependsOn: c.DependsOn, + Description: c.Description, Name: c.Name, + OnInactiveMarkAs: c.OnInactiveMarkAs, + State: c.State, Type: c.Type, - Description: c.Description, - DependsOn: c.DependsOn, UserProperties: c.UserProperties, - AdditionalProperties: c.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type CustomActivity. func (c *CustomActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: c.AdditionalProperties, + DependsOn: c.DependsOn, + Description: c.Description, LinkedServiceName: c.LinkedServiceName, - Policy: c.Policy, Name: c.Name, + OnInactiveMarkAs: c.OnInactiveMarkAs, + Policy: c.Policy, + State: c.State, Type: c.Type, - Description: c.Description, - DependsOn: c.DependsOn, UserProperties: c.UserProperties, - AdditionalProperties: c.AdditionalProperties, } } @@ -7987,12 +8052,12 @@ type CustomDataSourceLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type CustomDataSourceLinkedService. func (c *CustomDataSourceLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, ConnectVia: c.ConnectVia, Description: c.Description, Parameters: c.Parameters, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -8033,15 +8098,15 @@ type CustomDataset struct { // GetDataset implements the DatasetClassification interface for type CustomDataset. 
func (c *CustomDataset) GetDataset() *Dataset { return &Dataset{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, - Structure: c.Structure, - Schema: c.Schema, + Folder: c.Folder, LinkedServiceName: c.LinkedServiceName, Parameters: c.Parameters, - Annotations: c.Annotations, - Folder: c.Folder, - AdditionalProperties: c.AdditionalProperties, + Schema: c.Schema, + Structure: c.Structure, + Type: c.Type, } } @@ -8072,23 +8137,23 @@ type CustomEventsTrigger struct { // GetMultiplePipelineTrigger implements the MultiplePipelineTriggerClassification interface for type CustomEventsTrigger. func (c *CustomEventsTrigger) GetMultiplePipelineTrigger() *MultiplePipelineTrigger { return &MultiplePipelineTrigger{ - Pipelines: c.Pipelines, - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, + Pipelines: c.Pipelines, RuntimeState: c.RuntimeState, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } // GetTrigger implements the TriggerClassification interface for type CustomEventsTrigger. func (c *CustomEventsTrigger) GetTrigger() *Trigger { return &Trigger{ - Type: c.Type, + AdditionalProperties: c.AdditionalProperties, + Annotations: c.Annotations, Description: c.Description, RuntimeState: c.RuntimeState, - Annotations: c.Annotations, - AdditionalProperties: c.AdditionalProperties, + Type: c.Type, } } @@ -8109,15 +8174,6 @@ type CustomEventsTriggerTypeProperties struct { SubjectEndsWith *string } -// CustomSetupBaseClassification provides polymorphic access to related types. -// Call the interface's GetCustomSetupBase() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AzPowerShellSetup, *CmdkeySetup, *ComponentSetup, *CustomSetupBase, *EnvironmentVariableSetup -type CustomSetupBaseClassification interface { - // GetCustomSetupBase returns the CustomSetupBase content of the underlying type. - GetCustomSetupBase() *CustomSetupBase -} - // CustomSetupBase - The base definition of the custom setup. type CustomSetupBase struct { // REQUIRED; The type of custom setup. @@ -8149,15 +8205,6 @@ type DWCopyCommandSettings struct { DefaultValues []*DWCopyCommandDefaultValue } -// DataFlowClassification provides polymorphic access to related types. -// Call the interface's GetDataFlow() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *DataFlow, *Flowlet, *MappingDataFlow, *WranglingDataFlow -type DataFlowClassification interface { - // GetDataFlow returns the DataFlow content of the underlying type. - GetDataFlow() *DataFlow -} - // DataFlow - Azure Data Factory nested object which contains a flow with data movements and transformations. type DataFlow struct { // REQUIRED; Type of data flow. @@ -8260,37 +8307,6 @@ type DataFlowDebugResource struct { Name *string } -// DataFlowDebugSessionClientAddDataFlowOptions contains the optional parameters for the DataFlowDebugSessionClient.AddDataFlow -// method. -type DataFlowDebugSessionClientAddDataFlowOptions struct { - // placeholder for future optional parameters -} - -// DataFlowDebugSessionClientBeginCreateOptions contains the optional parameters for the DataFlowDebugSessionClient.BeginCreate -// method. -type DataFlowDebugSessionClientBeginCreateOptions struct { - // Resumes the LRO from the provided token. 
- ResumeToken string -} - -// DataFlowDebugSessionClientBeginExecuteCommandOptions contains the optional parameters for the DataFlowDebugSessionClient.BeginExecuteCommand -// method. -type DataFlowDebugSessionClientBeginExecuteCommandOptions struct { - // Resumes the LRO from the provided token. - ResumeToken string -} - -// DataFlowDebugSessionClientDeleteOptions contains the optional parameters for the DataFlowDebugSessionClient.Delete method. -type DataFlowDebugSessionClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// DataFlowDebugSessionClientQueryByFactoryOptions contains the optional parameters for the DataFlowDebugSessionClient.NewQueryByFactoryPager -// method. -type DataFlowDebugSessionClientQueryByFactoryOptions struct { - // placeholder for future optional parameters -} - // DataFlowDebugSessionInfo - Data flow debug session info. type DataFlowDebugSessionInfo struct { // OPTIONAL; Contains additional key/value pairs not defined in the schema. @@ -8441,30 +8457,6 @@ type DataFlowStagingInfo struct { LinkedService *LinkedServiceReference } -// DataFlowsClientCreateOrUpdateOptions contains the optional parameters for the DataFlowsClient.CreateOrUpdate method. -type DataFlowsClientCreateOrUpdateOptions struct { - // ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be - // * for unconditional update. - IfMatch *string -} - -// DataFlowsClientDeleteOptions contains the optional parameters for the DataFlowsClient.Delete method. -type DataFlowsClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// DataFlowsClientGetOptions contains the optional parameters for the DataFlowsClient.Get method. -type DataFlowsClientGetOptions struct { - // ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was - // provided, then no content will be returned. - IfNoneMatch *string -} - -// DataFlowsClientListByFactoryOptions contains the optional parameters for the DataFlowsClient.NewListByFactoryPager method. -type DataFlowsClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - // DataLakeAnalyticsUSQLActivity - Data Lake Analytics U-SQL activity. type DataLakeAnalyticsUSQLActivity struct { // REQUIRED; Activity name. @@ -8488,9 +8480,16 @@ type DataLakeAnalyticsUSQLActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -8498,26 +8497,30 @@ type DataLakeAnalyticsUSQLActivity struct { // GetActivity implements the ActivityClassification interface for type DataLakeAnalyticsUSQLActivity. 
func (d *DataLakeAnalyticsUSQLActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type DataLakeAnalyticsUSQLActivity. func (d *DataLakeAnalyticsUSQLActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, LinkedServiceName: d.LinkedServiceName, - Policy: d.Policy, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + Policy: d.Policy, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } @@ -8549,6 +8552,24 @@ type DataLakeAnalyticsUSQLActivityTypeProperties struct { RuntimeVersion any } +// DataMapperMapping - Source and target table mapping details. +type DataMapperMapping struct { + // This holds the user provided attribute mapping information. + AttributeMappingInfo *MapperAttributeMappings + + // The connection reference for the source connection. + SourceConnectionReference *MapperConnectionReference + + // This holds the source denormalization information used while joining multiple sources. + SourceDenormalizeInfo any + + // Name of the source table + SourceEntityName *string + + // Name of the target table + TargetEntityName *string +} + // DatabricksNotebookActivity - DatabricksNotebook activity. type DatabricksNotebookActivity struct { // REQUIRED; Activity name. @@ -8572,9 +8593,16 @@ type DatabricksNotebookActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -8582,26 +8610,30 @@ type DatabricksNotebookActivity struct { // GetActivity implements the ActivityClassification interface for type DatabricksNotebookActivity. func (d *DatabricksNotebookActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type DatabricksNotebookActivity. 
func (d *DatabricksNotebookActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, LinkedServiceName: d.LinkedServiceName, - Policy: d.Policy, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + Policy: d.Policy, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } @@ -8642,9 +8674,16 @@ type DatabricksSparkJarActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -8652,26 +8691,30 @@ type DatabricksSparkJarActivity struct { // GetActivity implements the ActivityClassification interface for type DatabricksSparkJarActivity. func (d *DatabricksSparkJarActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type DatabricksSparkJarActivity. func (d *DatabricksSparkJarActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, LinkedServiceName: d.LinkedServiceName, - Policy: d.Policy, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + Policy: d.Policy, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } @@ -8711,9 +8754,16 @@ type DatabricksSparkPythonActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -8721,26 +8771,30 @@ type DatabricksSparkPythonActivity struct { // GetActivity implements the ActivityClassification interface for type DatabricksSparkPythonActivity. 
func (d *DatabricksSparkPythonActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type DatabricksSparkPythonActivity. func (d *DatabricksSparkPythonActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, LinkedServiceName: d.LinkedServiceName, - Policy: d.Policy, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + Policy: d.Policy, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } @@ -8757,33 +8811,6 @@ type DatabricksSparkPythonActivityTypeProperties struct { Parameters []any } -// DatasetClassification provides polymorphic access to related types. -// Call the interface's GetDataset() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AmazonMWSObjectDataset, *AmazonRdsForOracleTableDataset, *AmazonRdsForSQLServerTableDataset, *AmazonRedshiftTableDataset, -// - *AmazonS3Dataset, *AvroDataset, *AzureBlobDataset, *AzureBlobFSDataset, *AzureDataExplorerTableDataset, *AzureDataLakeStoreDataset, -// - *AzureDatabricksDeltaLakeDataset, *AzureMariaDBTableDataset, *AzureMySQLTableDataset, *AzurePostgreSQLTableDataset, *AzureSQLDWTableDataset, -// - *AzureSQLMITableDataset, *AzureSQLTableDataset, *AzureSearchIndexDataset, *AzureTableDataset, *BinaryDataset, *CassandraTableDataset, -// - *CommonDataServiceForAppsEntityDataset, *ConcurObjectDataset, *CosmosDbMongoDbAPICollectionDataset, *CosmosDbSQLAPICollectionDataset, -// - *CouchbaseTableDataset, *CustomDataset, *Dataset, *Db2TableDataset, *DelimitedTextDataset, *DocumentDbCollectionDataset, -// - *DrillTableDataset, *DynamicsAXResourceDataset, *DynamicsCrmEntityDataset, *DynamicsEntityDataset, *EloquaObjectDataset, -// - *ExcelDataset, *FileShareDataset, *GoogleAdWordsObjectDataset, *GoogleBigQueryObjectDataset, *GreenplumTableDataset, -// - *HBaseObjectDataset, *HTTPDataset, *HiveObjectDataset, *HubspotObjectDataset, *ImpalaObjectDataset, *InformixTableDataset, -// - *JSONDataset, *JiraObjectDataset, *MagentoObjectDataset, *MariaDBTableDataset, *MarketoObjectDataset, *MicrosoftAccessTableDataset, -// - *MongoDbAtlasCollectionDataset, *MongoDbCollectionDataset, *MongoDbV2CollectionDataset, *MySQLTableDataset, *NetezzaTableDataset, -// - *ODataResourceDataset, *OdbcTableDataset, *Office365Dataset, *OracleServiceCloudObjectDataset, *OracleTableDataset, *OrcDataset, -// - *ParquetDataset, *PaypalObjectDataset, *PhoenixObjectDataset, *PostgreSQLTableDataset, *PrestoObjectDataset, *QuickBooksObjectDataset, -// - *RelationalTableDataset, *ResponsysObjectDataset, *RestResourceDataset, *SQLServerTableDataset, *SalesforceMarketingCloudObjectDataset, -// - *SalesforceObjectDataset, *SalesforceServiceCloudObjectDataset, *SapBwCubeDataset, *SapCloudForCustomerResourceDataset, -// - *SapEccResourceDataset, *SapHanaTableDataset, *SapOdpResourceDataset, *SapOpenHubTableDataset, *SapTableResourceDataset, -// - 
*ServiceNowObjectDataset, *SharePointOnlineListResourceDataset, *ShopifyObjectDataset, *SnowflakeDataset, *SparkObjectDataset, -// - *SquareObjectDataset, *SybaseTableDataset, *TeradataTableDataset, *VerticaTableDataset, *WebTableDataset, *XMLDataset, -// - *XeroObjectDataset, *ZohoObjectDataset -type DatasetClassification interface { - // GetDataset returns the Dataset content of the underlying type. - GetDataset() *Dataset -} - // Dataset - The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, // folders, and documents. type Dataset struct { @@ -8855,17 +8882,6 @@ type DatasetListResponse struct { NextLink *string } -// DatasetLocationClassification provides polymorphic access to related types. -// Call the interface's GetDatasetLocation() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AmazonS3CompatibleLocation, *AmazonS3Location, *AzureBlobFSLocation, *AzureBlobStorageLocation, *AzureDataLakeStoreLocation, -// - *AzureFileStorageLocation, *DatasetLocation, *FileServerLocation, *FtpServerLocation, *GoogleCloudStorageLocation, *HTTPServerLocation, -// - *HdfsLocation, *OracleCloudStorageLocation, *SftpLocation -type DatasetLocationClassification interface { - // GetDatasetLocation returns the DatasetLocation content of the underlying type. - GetDatasetLocation() *DatasetLocation -} - // DatasetLocation - Dataset location. type DatasetLocation struct { // REQUIRED; Type of dataset storage location. @@ -8914,15 +8930,6 @@ type DatasetResource struct { Type *string } -// DatasetStorageFormatClassification provides polymorphic access to related types. -// Call the interface's GetDatasetStorageFormat() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AvroFormat, *DatasetStorageFormat, *JSONFormat, *OrcFormat, *ParquetFormat, *TextFormat -type DatasetStorageFormatClassification interface { - // GetDatasetStorageFormat returns the DatasetStorageFormat content of the underlying type. - GetDatasetStorageFormat() *DatasetStorageFormat -} - // DatasetStorageFormat - The format definition of a storage. type DatasetStorageFormat struct { // REQUIRED; Type of dataset storage format. @@ -8941,30 +8948,6 @@ type DatasetStorageFormat struct { // GetDatasetStorageFormat implements the DatasetStorageFormatClassification interface for type DatasetStorageFormat. func (d *DatasetStorageFormat) GetDatasetStorageFormat() *DatasetStorageFormat { return d } -// DatasetsClientCreateOrUpdateOptions contains the optional parameters for the DatasetsClient.CreateOrUpdate method. -type DatasetsClientCreateOrUpdateOptions struct { - // ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * - // for unconditional update. - IfMatch *string -} - -// DatasetsClientDeleteOptions contains the optional parameters for the DatasetsClient.Delete method. -type DatasetsClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// DatasetsClientGetOptions contains the optional parameters for the DatasetsClient.Get method. -type DatasetsClientGetOptions struct { - // ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was - // provided, then no content will be returned. 
- IfNoneMatch *string -} - -// DatasetsClientListByFactoryOptions contains the optional parameters for the DatasetsClient.NewListByFactoryPager method. -type DatasetsClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - // DataworldLinkedService - Linked service for Dataworld. type DataworldLinkedService struct { // REQUIRED; Type of linked service. @@ -8992,12 +8975,12 @@ type DataworldLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type DataworldLinkedService. func (d *DataworldLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, ConnectVia: d.ConnectVia, Description: d.Description, Parameters: d.Parameters, - Annotations: d.Annotations, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -9007,8 +8990,8 @@ type DataworldLinkedServiceTypeProperties struct { APIToken SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string } // Db2LinkedService - Linked service for DB2 data source. @@ -9038,12 +9021,12 @@ type Db2LinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type Db2LinkedService. func (d *Db2LinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, ConnectVia: d.ConnectVia, Description: d.Description, Parameters: d.Parameters, - Annotations: d.Annotations, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -9066,9 +9049,8 @@ type Db2LinkedServiceTypeProperties struct { Database any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // It is mutually exclusive with connectionString property. Type: string (or - // Expression with resultType string). - EncryptedCredential any + // It is mutually exclusive with connectionString property. Type: string. + EncryptedCredential *string // Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: // string (or Expression with resultType string). @@ -9120,26 +9102,26 @@ type Db2Source struct { // GetCopySource implements the CopySourceClassification interface for type Db2Source. func (d *Db2Source) GetCopySource() *CopySource { return &CopySource{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type Db2Source. 
func (d *Db2Source) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: d.QueryTimeout, AdditionalColumns: d.AdditionalColumns, - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, + QueryTimeout: d.QueryTimeout, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -9180,15 +9162,15 @@ type Db2TableDataset struct { // GetDataset implements the DatasetClassification interface for type Db2TableDataset. func (d *Db2TableDataset) GetDataset() *Dataset { return &Dataset{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, Description: d.Description, - Structure: d.Structure, - Schema: d.Schema, + Folder: d.Folder, LinkedServiceName: d.LinkedServiceName, Parameters: d.Parameters, - Annotations: d.Annotations, - Folder: d.Folder, - AdditionalProperties: d.AdditionalProperties, + Schema: d.Schema, + Structure: d.Structure, + Type: d.Type, } } @@ -9227,9 +9209,16 @@ type DeleteActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -9237,26 +9226,30 @@ type DeleteActivity struct { // GetActivity implements the ActivityClassification interface for type DeleteActivity. func (d *DeleteActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type DeleteActivity. func (d *DeleteActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: d.AdditionalProperties, + DependsOn: d.DependsOn, + Description: d.Description, LinkedServiceName: d.LinkedServiceName, - Policy: d.Policy, Name: d.Name, + OnInactiveMarkAs: d.OnInactiveMarkAs, + Policy: d.Policy, + State: d.State, Type: d.Type, - Description: d.Description, - DependsOn: d.DependsOn, UserProperties: d.UserProperties, - AdditionalProperties: d.AdditionalProperties, } } @@ -9326,15 +9319,15 @@ type DelimitedTextDataset struct { // GetDataset implements the DatasetClassification interface for type DelimitedTextDataset. 
func (d *DelimitedTextDataset) GetDataset() *Dataset { return &Dataset{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, Description: d.Description, - Structure: d.Structure, - Schema: d.Schema, + Folder: d.Folder, LinkedServiceName: d.LinkedServiceName, Parameters: d.Parameters, - Annotations: d.Annotations, - Folder: d.Folder, - AdditionalProperties: d.AdditionalProperties, + Schema: d.Schema, + Structure: d.Structure, + Type: d.Type, } } @@ -9395,8 +9388,8 @@ type DelimitedTextReadSettings struct { // GetFormatReadSettings implements the FormatReadSettingsClassification interface for type DelimitedTextReadSettings. func (d *DelimitedTextReadSettings) GetFormatReadSettings() *FormatReadSettings { return &FormatReadSettings{ - Type: d.Type, AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -9436,14 +9429,14 @@ type DelimitedTextSink struct { // GetCopySink implements the CopySinkClassification interface for type DelimitedTextSink. func (d *DelimitedTextSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, + SinkRetryCount: d.SinkRetryCount, + SinkRetryWait: d.SinkRetryWait, Type: d.Type, WriteBatchSize: d.WriteBatchSize, WriteBatchTimeout: d.WriteBatchTimeout, - SinkRetryCount: d.SinkRetryCount, - SinkRetryWait: d.SinkRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, } } @@ -9481,12 +9474,12 @@ type DelimitedTextSource struct { // GetCopySource implements the CopySourceClassification interface for type DelimitedTextSource. func (d *DelimitedTextSource) GetCopySource() *CopySource { return &CopySource{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -9516,20 +9509,11 @@ type DelimitedTextWriteSettings struct { // GetFormatWriteSettings implements the FormatWriteSettingsClassification interface for type DelimitedTextWriteSettings. func (d *DelimitedTextWriteSettings) GetFormatWriteSettings() *FormatWriteSettings { return &FormatWriteSettings{ - Type: d.Type, AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } -// DependencyReferenceClassification provides polymorphic access to related types. -// Call the interface's GetDependencyReference() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *DependencyReference, *SelfDependencyTumblingWindowTriggerReference, *TriggerDependencyReference, *TumblingWindowTriggerDependencyReference -type DependencyReferenceClassification interface { - // GetDependencyReference returns the DependencyReference content of the underlying type. - GetDependencyReference() *DependencyReference -} - // DependencyReference - Referenced dependency. type DependencyReference struct { // REQUIRED; The type of dependency reference. 
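Editor's note (not part of the generated patch): every activity model touched in this change gains the two optional fields State and OnInactiveMarkAs shown in the hunks above. The following is a minimal, hedged sketch of how a caller might set them on the ControlActivity base type; the constant names ActivityStateInactive and ActivityOnInactiveMarkAsSucceeded and the /v2 module path are assumptions inferred from the documented values ("Active"/"Inactive", "Succeeded"), and to.Ptr is the azcore pointer helper.

package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2"
)

func main() {
	// Mark an activity inactive; per the field docs above, an inactive activity's run result
	// defaults to Succeeded unless OnInactiveMarkAs says otherwise.
	act := &armdatafactory.ControlActivity{
		Name:             to.Ptr("noOpStep"),
		State:            to.Ptr(armdatafactory.ActivityStateInactive),
		OnInactiveMarkAs: to.Ptr(armdatafactory.ActivityOnInactiveMarkAsSucceeded),
		// The required Type discriminator is assumed to be filled in by the model's JSON
		// marshaler, as is usual for this generated SDK.
	}
	fmt.Println(*act.Name, *act.State, *act.OnInactiveMarkAs)
}

Because both fields are optional pointers, existing callers that leave them nil keep today's behavior (state Active).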
@@ -9590,15 +9574,15 @@ type DocumentDbCollectionDataset struct { // GetDataset implements the DatasetClassification interface for type DocumentDbCollectionDataset. func (d *DocumentDbCollectionDataset) GetDataset() *Dataset { return &Dataset{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, Description: d.Description, - Structure: d.Structure, - Schema: d.Schema, + Folder: d.Folder, LinkedServiceName: d.LinkedServiceName, Parameters: d.Parameters, - Annotations: d.Annotations, - Folder: d.Folder, - AdditionalProperties: d.AdditionalProperties, + Schema: d.Schema, + Structure: d.Structure, + Type: d.Type, } } @@ -9645,14 +9629,14 @@ type DocumentDbCollectionSink struct { // GetCopySink implements the CopySinkClassification interface for type DocumentDbCollectionSink. func (d *DocumentDbCollectionSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, + SinkRetryCount: d.SinkRetryCount, + SinkRetryWait: d.SinkRetryWait, Type: d.Type, WriteBatchSize: d.WriteBatchSize, WriteBatchTimeout: d.WriteBatchTimeout, - SinkRetryCount: d.SinkRetryCount, - SinkRetryWait: d.SinkRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, } } @@ -9693,12 +9677,12 @@ type DocumentDbCollectionSource struct { // GetCopySource implements the CopySourceClassification interface for type DocumentDbCollectionSource. func (d *DocumentDbCollectionSource) GetCopySource() *CopySource { return &CopySource{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -9741,12 +9725,12 @@ type DrillLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type DrillLinkedService. func (d *DrillLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, ConnectVia: d.ConnectVia, Description: d.Description, Parameters: d.Parameters, - Annotations: d.Annotations, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -9756,8 +9740,8 @@ type DrillLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Pwd *AzureKeyVaultSecretReference @@ -9797,26 +9781,26 @@ type DrillSource struct { // GetCopySource implements the CopySourceClassification interface for type DrillSource. 
func (d *DrillSource) GetCopySource() *CopySource { return &CopySource{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type DrillSource. func (d *DrillSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: d.QueryTimeout, AdditionalColumns: d.AdditionalColumns, - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, + QueryTimeout: d.QueryTimeout, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -9857,15 +9841,15 @@ type DrillTableDataset struct { // GetDataset implements the DatasetClassification interface for type DrillTableDataset. func (d *DrillTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, Description: d.Description, - Structure: d.Structure, - Schema: d.Schema, + Folder: d.Folder, LinkedServiceName: d.LinkedServiceName, Parameters: d.Parameters, - Annotations: d.Annotations, - Folder: d.Folder, - AdditionalProperties: d.AdditionalProperties, + Schema: d.Schema, + Structure: d.Structure, + Type: d.Type, } } @@ -9896,12 +9880,12 @@ type DynamicsAXLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type DynamicsAXLinkedService. func (d *DynamicsAXLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, ConnectVia: d.ConnectVia, Description: d.Description, Parameters: d.Parameters, - Annotations: d.Annotations, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -9927,8 +9911,8 @@ type DynamicsAXLinkedServiceTypeProperties struct { URL any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string } // DynamicsAXResourceDataset - The path of the Dynamics AX OData entity. @@ -9968,15 +9952,15 @@ type DynamicsAXResourceDataset struct { // GetDataset implements the DatasetClassification interface for type DynamicsAXResourceDataset. 
func (d *DynamicsAXResourceDataset) GetDataset() *Dataset { return &Dataset{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, Description: d.Description, - Structure: d.Structure, - Schema: d.Schema, + Folder: d.Folder, LinkedServiceName: d.LinkedServiceName, Parameters: d.Parameters, - Annotations: d.Annotations, - Folder: d.Folder, - AdditionalProperties: d.AdditionalProperties, + Schema: d.Schema, + Structure: d.Structure, + Type: d.Type, } } @@ -10025,26 +10009,26 @@ type DynamicsAXSource struct { // GetCopySource implements the CopySourceClassification interface for type DynamicsAXSource. func (d *DynamicsAXSource) GetCopySource() *CopySource { return &CopySource{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type DynamicsAXSource. func (d *DynamicsAXSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: d.QueryTimeout, AdditionalColumns: d.AdditionalColumns, - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, + QueryTimeout: d.QueryTimeout, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -10085,15 +10069,15 @@ type DynamicsCrmEntityDataset struct { // GetDataset implements the DatasetClassification interface for type DynamicsCrmEntityDataset. func (d *DynamicsCrmEntityDataset) GetDataset() *Dataset { return &Dataset{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, Description: d.Description, - Structure: d.Structure, - Schema: d.Schema, + Folder: d.Folder, LinkedServiceName: d.LinkedServiceName, Parameters: d.Parameters, - Annotations: d.Annotations, - Folder: d.Folder, - AdditionalProperties: d.AdditionalProperties, + Schema: d.Schema, + Structure: d.Structure, + Type: d.Type, } } @@ -10130,12 +10114,12 @@ type DynamicsCrmLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type DynamicsCrmLinkedService. func (d *DynamicsCrmLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, ConnectVia: d.ConnectVia, Description: d.Description, Parameters: d.Parameters, - Annotations: d.Annotations, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -10151,8 +10135,8 @@ type DynamicsCrmLinkedServiceTypeProperties struct { DeploymentType any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. 
// Type: string (or Expression with resultType string). @@ -10234,14 +10218,14 @@ type DynamicsCrmSink struct { // GetCopySink implements the CopySinkClassification interface for type DynamicsCrmSink. func (d *DynamicsCrmSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, + SinkRetryCount: d.SinkRetryCount, + SinkRetryWait: d.SinkRetryWait, Type: d.Type, WriteBatchSize: d.WriteBatchSize, WriteBatchTimeout: d.WriteBatchTimeout, - SinkRetryCount: d.SinkRetryCount, - SinkRetryWait: d.SinkRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, } } @@ -10277,12 +10261,12 @@ type DynamicsCrmSource struct { // GetCopySource implements the CopySourceClassification interface for type DynamicsCrmSource. func (d *DynamicsCrmSource) GetCopySource() *CopySource { return &CopySource{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -10323,15 +10307,15 @@ type DynamicsEntityDataset struct { // GetDataset implements the DatasetClassification interface for type DynamicsEntityDataset. func (d *DynamicsEntityDataset) GetDataset() *Dataset { return &Dataset{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, Description: d.Description, - Structure: d.Structure, - Schema: d.Schema, + Folder: d.Folder, LinkedServiceName: d.LinkedServiceName, Parameters: d.Parameters, - Annotations: d.Annotations, - Folder: d.Folder, - AdditionalProperties: d.AdditionalProperties, + Schema: d.Schema, + Structure: d.Structure, + Type: d.Type, } } @@ -10368,12 +10352,12 @@ type DynamicsLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type DynamicsLinkedService. func (d *DynamicsLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + Annotations: d.Annotations, ConnectVia: d.ConnectVia, Description: d.Description, Parameters: d.Parameters, - Annotations: d.Annotations, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -10392,8 +10376,8 @@ type DynamicsLinkedServiceTypeProperties struct { Credential *CredentialReference // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: // string (or Expression with resultType string). @@ -10475,14 +10459,14 @@ type DynamicsSink struct { // GetCopySink implements the CopySinkClassification interface for type DynamicsSink. 
func (d *DynamicsSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, + SinkRetryCount: d.SinkRetryCount, + SinkRetryWait: d.SinkRetryWait, Type: d.Type, WriteBatchSize: d.WriteBatchSize, WriteBatchTimeout: d.WriteBatchTimeout, - SinkRetryCount: d.SinkRetryCount, - SinkRetryWait: d.SinkRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, } } @@ -10518,12 +10502,12 @@ type DynamicsSource struct { // GetCopySource implements the CopySourceClassification interface for type DynamicsSource. func (d *DynamicsSource) GetCopySource() *CopySource { return &CopySource{ - Type: d.Type, + AdditionalProperties: d.AdditionalProperties, + DisableMetricsCollection: d.DisableMetricsCollection, + MaxConcurrentConnections: d.MaxConcurrentConnections, SourceRetryCount: d.SourceRetryCount, SourceRetryWait: d.SourceRetryWait, - MaxConcurrentConnections: d.MaxConcurrentConnections, - DisableMetricsCollection: d.DisableMetricsCollection, - AdditionalProperties: d.AdditionalProperties, + Type: d.Type, } } @@ -10554,12 +10538,12 @@ type EloquaLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type EloquaLinkedService. func (e *EloquaLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: e.Type, + AdditionalProperties: e.AdditionalProperties, + Annotations: e.Annotations, ConnectVia: e.ConnectVia, Description: e.Description, Parameters: e.Parameters, - Annotations: e.Annotations, - AdditionalProperties: e.AdditionalProperties, + Type: e.Type, } } @@ -10572,8 +10556,8 @@ type EloquaLinkedServiceTypeProperties struct { Username any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password corresponding to the user name. Password SecretBaseClassification @@ -10626,15 +10610,15 @@ type EloquaObjectDataset struct { // GetDataset implements the DatasetClassification interface for type EloquaObjectDataset. func (e *EloquaObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: e.Type, + AdditionalProperties: e.AdditionalProperties, + Annotations: e.Annotations, Description: e.Description, - Structure: e.Structure, - Schema: e.Schema, + Folder: e.Folder, LinkedServiceName: e.LinkedServiceName, Parameters: e.Parameters, - Annotations: e.Annotations, - Folder: e.Folder, - AdditionalProperties: e.AdditionalProperties, + Schema: e.Schema, + Structure: e.Structure, + Type: e.Type, } } @@ -10672,26 +10656,26 @@ type EloquaSource struct { // GetCopySource implements the CopySourceClassification interface for type EloquaSource. 
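// Editorial sketch, not part of the generated models: the reordered constructors
// in this hunk all feed the polymorphic *Classification interfaces (CopySourceClassification,
// TabularSourceClassification, and so on). Assuming the package layout shown in this
// patch (the module's major-version suffix is omitted here) a consumer could branch on
// a copy source roughly as follows; the helper name describeSource is illustrative only.

package example

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory"
)

func describeSource(src armdatafactory.CopySourceClassification) {
	// Fields shared by every copy source are reachable through GetCopySource.
	common := src.GetCopySource()
	fmt.Printf("type=%v retries=%v\n", common.Type, common.SourceRetryCount)

	// Tabular sources such as EloquaSource additionally expose QueryTimeout
	// and AdditionalColumns through GetTabularSource.
	if tab, ok := src.(armdatafactory.TabularSourceClassification); ok {
		fmt.Printf("query timeout=%v\n", tab.GetTabularSource().QueryTimeout)
	}
}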
func (e *EloquaSource) GetCopySource() *CopySource { return &CopySource{ - Type: e.Type, + AdditionalProperties: e.AdditionalProperties, + DisableMetricsCollection: e.DisableMetricsCollection, + MaxConcurrentConnections: e.MaxConcurrentConnections, SourceRetryCount: e.SourceRetryCount, SourceRetryWait: e.SourceRetryWait, - MaxConcurrentConnections: e.MaxConcurrentConnections, - DisableMetricsCollection: e.DisableMetricsCollection, - AdditionalProperties: e.AdditionalProperties, + Type: e.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type EloquaSource. func (e *EloquaSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: e.QueryTimeout, AdditionalColumns: e.AdditionalColumns, - Type: e.Type, + AdditionalProperties: e.AdditionalProperties, + DisableMetricsCollection: e.DisableMetricsCollection, + MaxConcurrentConnections: e.MaxConcurrentConnections, + QueryTimeout: e.QueryTimeout, SourceRetryCount: e.SourceRetryCount, SourceRetryWait: e.SourceRetryWait, - MaxConcurrentConnections: e.MaxConcurrentConnections, - DisableMetricsCollection: e.DisableMetricsCollection, - AdditionalProperties: e.AdditionalProperties, + Type: e.Type, } } @@ -10782,15 +10766,15 @@ type ExcelDataset struct { // GetDataset implements the DatasetClassification interface for type ExcelDataset. func (e *ExcelDataset) GetDataset() *Dataset { return &Dataset{ - Type: e.Type, + AdditionalProperties: e.AdditionalProperties, + Annotations: e.Annotations, Description: e.Description, - Structure: e.Structure, - Schema: e.Schema, + Folder: e.Folder, LinkedServiceName: e.LinkedServiceName, Parameters: e.Parameters, - Annotations: e.Annotations, - Folder: e.Folder, - AdditionalProperties: e.AdditionalProperties, + Schema: e.Schema, + Structure: e.Structure, + Type: e.Type, } } @@ -10851,12 +10835,12 @@ type ExcelSource struct { // GetCopySource implements the CopySourceClassification interface for type ExcelSource. func (e *ExcelSource) GetCopySource() *CopySource { return &CopySource{ - Type: e.Type, + AdditionalProperties: e.AdditionalProperties, + DisableMetricsCollection: e.DisableMetricsCollection, + MaxConcurrentConnections: e.MaxConcurrentConnections, SourceRetryCount: e.SourceRetryCount, SourceRetryWait: e.SourceRetryWait, - MaxConcurrentConnections: e.MaxConcurrentConnections, - DisableMetricsCollection: e.DisableMetricsCollection, - AdditionalProperties: e.AdditionalProperties, + Type: e.Type, } } @@ -10883,9 +10867,16 @@ type ExecuteDataFlowActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -10893,26 +10884,30 @@ type ExecuteDataFlowActivity struct { // GetActivity implements the ActivityClassification interface for type ExecuteDataFlowActivity. 
func (e *ExecuteDataFlowActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: e.AdditionalProperties, + DependsOn: e.DependsOn, + Description: e.Description, Name: e.Name, + OnInactiveMarkAs: e.OnInactiveMarkAs, + State: e.State, Type: e.Type, - Description: e.Description, - DependsOn: e.DependsOn, UserProperties: e.UserProperties, - AdditionalProperties: e.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type ExecuteDataFlowActivity. func (e *ExecuteDataFlowActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: e.AdditionalProperties, + DependsOn: e.DependsOn, + Description: e.Description, LinkedServiceName: e.LinkedServiceName, - Policy: e.Policy, Name: e.Name, + OnInactiveMarkAs: e.OnInactiveMarkAs, + Policy: e.Policy, + State: e.State, Type: e.Type, - Description: e.Description, - DependsOn: e.DependsOn, UserProperties: e.UserProperties, - AdditionalProperties: e.AdditionalProperties, } } @@ -10977,9 +10972,16 @@ type ExecutePipelineActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Execute pipeline activity policy. Policy *ExecutePipelineActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -10987,24 +10989,28 @@ type ExecutePipelineActivity struct { // GetActivity implements the ActivityClassification interface for type ExecutePipelineActivity. func (e *ExecutePipelineActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: e.AdditionalProperties, + DependsOn: e.DependsOn, + Description: e.Description, Name: e.Name, + OnInactiveMarkAs: e.OnInactiveMarkAs, + State: e.State, Type: e.Type, - Description: e.Description, - DependsOn: e.DependsOn, UserProperties: e.UserProperties, - AdditionalProperties: e.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type ExecutePipelineActivity. func (e *ExecutePipelineActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: e.AdditionalProperties, + DependsOn: e.DependsOn, + Description: e.Description, Name: e.Name, + OnInactiveMarkAs: e.OnInactiveMarkAs, + State: e.State, Type: e.Type, - Description: e.Description, - DependsOn: e.DependsOn, UserProperties: e.UserProperties, - AdditionalProperties: e.AdditionalProperties, } } @@ -11088,9 +11094,16 @@ type ExecuteSSISPackageActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. 
UserProperties []*UserProperty } @@ -11098,26 +11111,30 @@ type ExecuteSSISPackageActivity struct { // GetActivity implements the ActivityClassification interface for type ExecuteSSISPackageActivity. func (e *ExecuteSSISPackageActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: e.AdditionalProperties, + DependsOn: e.DependsOn, + Description: e.Description, Name: e.Name, + OnInactiveMarkAs: e.OnInactiveMarkAs, + State: e.State, Type: e.Type, - Description: e.Description, - DependsOn: e.DependsOn, UserProperties: e.UserProperties, - AdditionalProperties: e.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type ExecuteSSISPackageActivity. func (e *ExecuteSSISPackageActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: e.AdditionalProperties, + DependsOn: e.DependsOn, + Description: e.Description, LinkedServiceName: e.LinkedServiceName, - Policy: e.Policy, Name: e.Name, + OnInactiveMarkAs: e.OnInactiveMarkAs, + Policy: e.Policy, + State: e.State, Type: e.Type, - Description: e.Description, - DependsOn: e.DependsOn, UserProperties: e.UserProperties, - AdditionalProperties: e.AdditionalProperties, } } @@ -11181,9 +11198,16 @@ type ExecuteWranglingDataflowActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -11191,30 +11215,17 @@ type ExecuteWranglingDataflowActivity struct { // GetActivity implements the ActivityClassification interface for type ExecuteWranglingDataflowActivity. func (e *ExecuteWranglingDataflowActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: e.AdditionalProperties, + DependsOn: e.DependsOn, + Description: e.Description, Name: e.Name, + OnInactiveMarkAs: e.OnInactiveMarkAs, + State: e.State, Type: e.Type, - Description: e.Description, - DependsOn: e.DependsOn, UserProperties: e.UserProperties, - AdditionalProperties: e.AdditionalProperties, } } -// ExecutionActivityClassification provides polymorphic access to related types. -// Call the interface's GetExecutionActivity() method to access the common type. -// Use a type switch to determine the concrete type. 
The possible types are: -// - *AzureDataExplorerCommandActivity, *AzureFunctionActivity, *AzureMLBatchExecutionActivity, *AzureMLExecutePipelineActivity, -// - *AzureMLUpdateResourceActivity, *CopyActivity, *CustomActivity, *DataLakeAnalyticsUSQLActivity, *DatabricksNotebookActivity, -// - *DatabricksSparkJarActivity, *DatabricksSparkPythonActivity, *DeleteActivity, *ExecuteDataFlowActivity, *ExecuteSSISPackageActivity, -// - *ExecutionActivity, *GetMetadataActivity, *HDInsightHiveActivity, *HDInsightMapReduceActivity, *HDInsightPigActivity, -// - *HDInsightSparkActivity, *HDInsightStreamingActivity, *LookupActivity, *SQLServerStoredProcedureActivity, *ScriptActivity, -// - *SynapseNotebookActivity, *SynapseSparkJobDefinitionActivity, *WebActivity -type ExecutionActivityClassification interface { - ActivityClassification - // GetExecutionActivity returns the ExecutionActivity content of the underlying type. - GetExecutionActivity() *ExecutionActivity -} - // ExecutionActivity - Base class for all execution activities. type ExecutionActivity struct { // REQUIRED; Activity name. @@ -11235,9 +11246,16 @@ type ExecutionActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -11245,27 +11263,20 @@ type ExecutionActivity struct { // GetActivity implements the ActivityClassification interface for type ExecutionActivity. func (e *ExecutionActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: e.AdditionalProperties, + DependsOn: e.DependsOn, + Description: e.Description, Name: e.Name, + OnInactiveMarkAs: e.OnInactiveMarkAs, + State: e.State, Type: e.Type, - Description: e.Description, - DependsOn: e.DependsOn, UserProperties: e.UserProperties, - AdditionalProperties: e.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type ExecutionActivity. func (e *ExecutionActivity) GetExecutionActivity() *ExecutionActivity { return e } -// ExportSettingsClassification provides polymorphic access to related types. -// Call the interface's GetExportSettings() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AzureDatabricksDeltaLakeExportCommand, *ExportSettings, *SnowflakeExportCopyCommand -type ExportSettingsClassification interface { - // GetExportSettings returns the ExportSettings content of the underlying type. - GetExportSettings() *ExportSettings -} - // ExportSettings - Export command settings. type ExportSettings struct { // REQUIRED; The export setting type. @@ -11290,24 +11301,6 @@ type ExposureControlBatchResponse struct { ExposureControlResponses []*ExposureControlResponse } -// ExposureControlClientGetFeatureValueByFactoryOptions contains the optional parameters for the ExposureControlClient.GetFeatureValueByFactory -// method. 
-type ExposureControlClientGetFeatureValueByFactoryOptions struct { - // placeholder for future optional parameters -} - -// ExposureControlClientGetFeatureValueOptions contains the optional parameters for the ExposureControlClient.GetFeatureValue -// method. -type ExposureControlClientGetFeatureValueOptions struct { - // placeholder for future optional parameters -} - -// ExposureControlClientQueryFeatureValuesByFactoryOptions contains the optional parameters for the ExposureControlClient.QueryFeatureValuesByFactory -// method. -type ExposureControlClientQueryFeatureValuesByFactoryOptions struct { - // placeholder for future optional parameters -} - // ExposureControlRequest - The exposure control request. type ExposureControlRequest struct { // The feature name. @@ -11335,58 +11328,6 @@ type Expression struct { Value *string } -// FactoriesClientConfigureFactoryRepoOptions contains the optional parameters for the FactoriesClient.ConfigureFactoryRepo -// method. -type FactoriesClientConfigureFactoryRepoOptions struct { - // placeholder for future optional parameters -} - -// FactoriesClientCreateOrUpdateOptions contains the optional parameters for the FactoriesClient.CreateOrUpdate method. -type FactoriesClientCreateOrUpdateOptions struct { - // ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * - // for unconditional update. - IfMatch *string -} - -// FactoriesClientDeleteOptions contains the optional parameters for the FactoriesClient.Delete method. -type FactoriesClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// FactoriesClientGetDataPlaneAccessOptions contains the optional parameters for the FactoriesClient.GetDataPlaneAccess method. -type FactoriesClientGetDataPlaneAccessOptions struct { - // placeholder for future optional parameters -} - -// FactoriesClientGetGitHubAccessTokenOptions contains the optional parameters for the FactoriesClient.GetGitHubAccessToken -// method. -type FactoriesClientGetGitHubAccessTokenOptions struct { - // placeholder for future optional parameters -} - -// FactoriesClientGetOptions contains the optional parameters for the FactoriesClient.Get method. -type FactoriesClientGetOptions struct { - // ETag of the factory entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was - // provided, then no content will be returned. - IfNoneMatch *string -} - -// FactoriesClientListByResourceGroupOptions contains the optional parameters for the FactoriesClient.NewListByResourceGroupPager -// method. -type FactoriesClientListByResourceGroupOptions struct { - // placeholder for future optional parameters -} - -// FactoriesClientListOptions contains the optional parameters for the FactoriesClient.NewListPager method. -type FactoriesClientListOptions struct { - // placeholder for future optional parameters -} - -// FactoriesClientUpdateOptions contains the optional parameters for the FactoriesClient.Update method. -type FactoriesClientUpdateOptions struct { - // placeholder for future optional parameters -} - // Factory resource type. type Factory struct { // OPTIONAL; Contains additional key/value pairs not defined in the schema. @@ -11453,13 +11394,13 @@ type FactoryGitHubConfiguration struct { // GetFactoryRepoConfiguration implements the FactoryRepoConfigurationClassification interface for type FactoryGitHubConfiguration. 
func (f *FactoryGitHubConfiguration) GetFactoryRepoConfiguration() *FactoryRepoConfiguration { return &FactoryRepoConfiguration{ - Type: f.Type, AccountName: f.AccountName, - RepositoryName: f.RepositoryName, CollaborationBranch: f.CollaborationBranch, - RootFolder: f.RootFolder, - LastCommitID: f.LastCommitID, DisablePublish: f.DisablePublish, + LastCommitID: f.LastCommitID, + RepositoryName: f.RepositoryName, + RootFolder: f.RootFolder, + Type: f.Type, } } @@ -11514,15 +11455,6 @@ type FactoryProperties struct { Version *string } -// FactoryRepoConfigurationClassification provides polymorphic access to related types. -// Call the interface's GetFactoryRepoConfiguration() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *FactoryGitHubConfiguration, *FactoryRepoConfiguration, *FactoryVSTSConfiguration -type FactoryRepoConfigurationClassification interface { - // GetFactoryRepoConfiguration returns the FactoryRepoConfiguration content of the underlying type. - GetFactoryRepoConfiguration() *FactoryRepoConfiguration -} - // FactoryRepoConfiguration - Factory's git repo information. type FactoryRepoConfiguration struct { // REQUIRED; Account name. @@ -11610,13 +11542,13 @@ type FactoryVSTSConfiguration struct { // GetFactoryRepoConfiguration implements the FactoryRepoConfigurationClassification interface for type FactoryVSTSConfiguration. func (f *FactoryVSTSConfiguration) GetFactoryRepoConfiguration() *FactoryRepoConfiguration { return &FactoryRepoConfiguration{ - Type: f.Type, AccountName: f.AccountName, - RepositoryName: f.RepositoryName, CollaborationBranch: f.CollaborationBranch, - RootFolder: f.RootFolder, - LastCommitID: f.LastCommitID, DisablePublish: f.DisablePublish, + LastCommitID: f.LastCommitID, + RepositoryName: f.RepositoryName, + RootFolder: f.RootFolder, + Type: f.Type, } } @@ -11643,6 +11575,13 @@ type FailActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -11650,24 +11589,28 @@ type FailActivity struct { // GetActivity implements the ActivityClassification interface for type FailActivity. func (f *FailActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: f.AdditionalProperties, + DependsOn: f.DependsOn, + Description: f.Description, Name: f.Name, + OnInactiveMarkAs: f.OnInactiveMarkAs, + State: f.State, Type: f.Type, - Description: f.Description, - DependsOn: f.DependsOn, UserProperties: f.UserProperties, - AdditionalProperties: f.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type FailActivity. 
func (f *FailActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: f.AdditionalProperties, + DependsOn: f.DependsOn, + Description: f.Description, Name: f.Name, + OnInactiveMarkAs: f.OnInactiveMarkAs, + State: f.State, Type: f.Type, - Description: f.Description, - DependsOn: f.DependsOn, UserProperties: f.UserProperties, - AdditionalProperties: f.AdditionalProperties, } } @@ -11710,12 +11653,12 @@ type FileServerLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type FileServerLinkedService. func (f *FileServerLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: f.Type, + AdditionalProperties: f.AdditionalProperties, + Annotations: f.Annotations, ConnectVia: f.ConnectVia, Description: f.Description, Parameters: f.Parameters, - Annotations: f.Annotations, - AdditionalProperties: f.AdditionalProperties, + Type: f.Type, } } @@ -11725,8 +11668,8 @@ type FileServerLinkedServiceTypeProperties struct { Host any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password to logon the server. Password SecretBaseClassification @@ -11753,10 +11696,10 @@ type FileServerLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type FileServerLocation. func (f *FileServerLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: f.Type, - FolderPath: f.FolderPath, - FileName: f.FileName, AdditionalProperties: f.AdditionalProperties, + FileName: f.FileName, + FolderPath: f.FolderPath, + Type: f.Type, } } @@ -11775,8 +11718,8 @@ type FileServerReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression // with resultType string). @@ -11812,10 +11755,10 @@ type FileServerReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type FileServerReadSettings. func (f *FileServerReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: f.Type, - MaxConcurrentConnections: f.MaxConcurrentConnections, - DisableMetricsCollection: f.DisableMetricsCollection, AdditionalProperties: f.AdditionalProperties, + DisableMetricsCollection: f.DisableMetricsCollection, + MaxConcurrentConnections: f.MaxConcurrentConnections, + Type: f.Type, } } @@ -11840,11 +11783,11 @@ type FileServerWriteSettings struct { // GetStoreWriteSettings implements the StoreWriteSettingsClassification interface for type FileServerWriteSettings. 
func (f *FileServerWriteSettings) GetStoreWriteSettings() *StoreWriteSettings { return &StoreWriteSettings{ - Type: f.Type, - MaxConcurrentConnections: f.MaxConcurrentConnections, - DisableMetricsCollection: f.DisableMetricsCollection, - CopyBehavior: f.CopyBehavior, AdditionalProperties: f.AdditionalProperties, + CopyBehavior: f.CopyBehavior, + DisableMetricsCollection: f.DisableMetricsCollection, + MaxConcurrentConnections: f.MaxConcurrentConnections, + Type: f.Type, } } @@ -11885,15 +11828,15 @@ type FileShareDataset struct { // GetDataset implements the DatasetClassification interface for type FileShareDataset. func (f *FileShareDataset) GetDataset() *Dataset { return &Dataset{ - Type: f.Type, + AdditionalProperties: f.AdditionalProperties, + Annotations: f.Annotations, Description: f.Description, - Structure: f.Structure, - Schema: f.Schema, + Folder: f.Folder, LinkedServiceName: f.LinkedServiceName, Parameters: f.Parameters, - Annotations: f.Annotations, - Folder: f.Folder, - AdditionalProperties: f.AdditionalProperties, + Schema: f.Schema, + Structure: f.Structure, + Type: f.Type, } } @@ -11955,14 +11898,14 @@ type FileSystemSink struct { // GetCopySink implements the CopySinkClassification interface for type FileSystemSink. func (f *FileSystemSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: f.AdditionalProperties, + DisableMetricsCollection: f.DisableMetricsCollection, + MaxConcurrentConnections: f.MaxConcurrentConnections, + SinkRetryCount: f.SinkRetryCount, + SinkRetryWait: f.SinkRetryWait, Type: f.Type, WriteBatchSize: f.WriteBatchSize, WriteBatchTimeout: f.WriteBatchTimeout, - SinkRetryCount: f.SinkRetryCount, - SinkRetryWait: f.SinkRetryWait, - MaxConcurrentConnections: f.MaxConcurrentConnections, - DisableMetricsCollection: f.DisableMetricsCollection, - AdditionalProperties: f.AdditionalProperties, } } @@ -11998,12 +11941,12 @@ type FileSystemSource struct { // GetCopySource implements the CopySourceClassification interface for type FileSystemSource. func (f *FileSystemSource) GetCopySource() *CopySource { return &CopySource{ - Type: f.Type, + AdditionalProperties: f.AdditionalProperties, + DisableMetricsCollection: f.DisableMetricsCollection, + MaxConcurrentConnections: f.MaxConcurrentConnections, SourceRetryCount: f.SourceRetryCount, SourceRetryWait: f.SourceRetryWait, - MaxConcurrentConnections: f.MaxConcurrentConnections, - DisableMetricsCollection: f.DisableMetricsCollection, - AdditionalProperties: f.AdditionalProperties, + Type: f.Type, } } @@ -12027,6 +11970,13 @@ type FilterActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -12034,24 +11984,28 @@ type FilterActivity struct { // GetActivity implements the ActivityClassification interface for type FilterActivity. 
func (f *FilterActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: f.AdditionalProperties, + DependsOn: f.DependsOn, + Description: f.Description, Name: f.Name, + OnInactiveMarkAs: f.OnInactiveMarkAs, + State: f.State, Type: f.Type, - Description: f.Description, - DependsOn: f.DependsOn, UserProperties: f.UserProperties, - AdditionalProperties: f.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type FilterActivity. func (f *FilterActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: f.AdditionalProperties, + DependsOn: f.DependsOn, + Description: f.Description, Name: f.Name, + OnInactiveMarkAs: f.OnInactiveMarkAs, + State: f.State, Type: f.Type, - Description: f.Description, - DependsOn: f.DependsOn, UserProperties: f.UserProperties, - AdditionalProperties: f.AdditionalProperties, } } @@ -12085,10 +12039,10 @@ type Flowlet struct { // GetDataFlow implements the DataFlowClassification interface for type Flowlet. func (f *Flowlet) GetDataFlow() *DataFlow { return &DataFlow{ - Type: f.Type, - Description: f.Description, Annotations: f.Annotations, + Description: f.Description, Folder: f.Folder, + Type: f.Type, } } @@ -12130,6 +12084,13 @@ type ForEachActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -12137,24 +12098,28 @@ type ForEachActivity struct { // GetActivity implements the ActivityClassification interface for type ForEachActivity. func (f *ForEachActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: f.AdditionalProperties, + DependsOn: f.DependsOn, + Description: f.Description, Name: f.Name, + OnInactiveMarkAs: f.OnInactiveMarkAs, + State: f.State, Type: f.Type, - Description: f.Description, - DependsOn: f.DependsOn, UserProperties: f.UserProperties, - AdditionalProperties: f.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type ForEachActivity. func (f *ForEachActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: f.AdditionalProperties, + DependsOn: f.DependsOn, + Description: f.Description, Name: f.Name, + OnInactiveMarkAs: f.OnInactiveMarkAs, + State: f.State, Type: f.Type, - Description: f.Description, - DependsOn: f.DependsOn, UserProperties: f.UserProperties, - AdditionalProperties: f.AdditionalProperties, } } @@ -12173,15 +12138,6 @@ type ForEachActivityTypeProperties struct { IsSequential *bool } -// FormatReadSettingsClassification provides polymorphic access to related types. -// Call the interface's GetFormatReadSettings() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *BinaryReadSettings, *DelimitedTextReadSettings, *FormatReadSettings, *JSONReadSettings, *XMLReadSettings -type FormatReadSettingsClassification interface { - // GetFormatReadSettings returns the FormatReadSettings content of the underlying type. - GetFormatReadSettings() *FormatReadSettings -} - // FormatReadSettings - Format read settings. 
type FormatReadSettings struct { // REQUIRED; The read setting type. @@ -12194,15 +12150,6 @@ type FormatReadSettings struct { // GetFormatReadSettings implements the FormatReadSettingsClassification interface for type FormatReadSettings. func (f *FormatReadSettings) GetFormatReadSettings() *FormatReadSettings { return f } -// FormatWriteSettingsClassification provides polymorphic access to related types. -// Call the interface's GetFormatWriteSettings() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AvroWriteSettings, *DelimitedTextWriteSettings, *FormatWriteSettings, *JSONWriteSettings, *OrcWriteSettings, *ParquetWriteSettings -type FormatWriteSettingsClassification interface { - // GetFormatWriteSettings returns the FormatWriteSettings content of the underlying type. - GetFormatWriteSettings() *FormatWriteSettings -} - // FormatWriteSettings - Format write settings. type FormatWriteSettings struct { // REQUIRED; The write setting type. @@ -12233,8 +12180,8 @@ type FtpReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -12250,8 +12197,8 @@ type FtpReadSettings struct { // boolean). Recursive any - // Specify whether to use binary transfer mode for FTP stores. - UseBinaryTransfer *bool + // Specify whether to use binary transfer mode for FTP stores. Type: boolean (or Expression with resultType boolean). + UseBinaryTransfer any // Ftp wildcardFileName. Type: string (or Expression with resultType string). WildcardFileName any @@ -12263,10 +12210,10 @@ type FtpReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type FtpReadSettings. func (f *FtpReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: f.Type, - MaxConcurrentConnections: f.MaxConcurrentConnections, - DisableMetricsCollection: f.DisableMetricsCollection, AdditionalProperties: f.AdditionalProperties, + DisableMetricsCollection: f.DisableMetricsCollection, + MaxConcurrentConnections: f.MaxConcurrentConnections, + Type: f.Type, } } @@ -12297,12 +12244,12 @@ type FtpServerLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type FtpServerLinkedService. func (f *FtpServerLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: f.Type, + AdditionalProperties: f.AdditionalProperties, + Annotations: f.Annotations, ConnectVia: f.ConnectVia, Description: f.Description, Parameters: f.Parameters, - Annotations: f.Annotations, - AdditionalProperties: f.AdditionalProperties, + Type: f.Type, } } @@ -12323,8 +12270,8 @@ type FtpServerLinkedServiceTypeProperties struct { EnableServerCertificateValidation any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. 
+ EncryptedCredential *string // Password to logon the FTP server. Password SecretBaseClassification @@ -12355,10 +12302,10 @@ type FtpServerLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type FtpServerLocation. func (f *FtpServerLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: f.Type, - FolderPath: f.FolderPath, - FileName: f.FileName, AdditionalProperties: f.AdditionalProperties, + FileName: f.FileName, + FolderPath: f.FolderPath, + Type: f.Type, } } @@ -12391,9 +12338,16 @@ type GetMetadataActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -12401,26 +12355,30 @@ type GetMetadataActivity struct { // GetActivity implements the ActivityClassification interface for type GetMetadataActivity. func (g *GetMetadataActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: g.AdditionalProperties, + DependsOn: g.DependsOn, + Description: g.Description, Name: g.Name, + OnInactiveMarkAs: g.OnInactiveMarkAs, + State: g.State, Type: g.Type, - Description: g.Description, - DependsOn: g.DependsOn, UserProperties: g.UserProperties, - AdditionalProperties: g.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type GetMetadataActivity. func (g *GetMetadataActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: g.AdditionalProperties, + DependsOn: g.DependsOn, + Description: g.Description, LinkedServiceName: g.LinkedServiceName, - Policy: g.Policy, Name: g.Name, + OnInactiveMarkAs: g.OnInactiveMarkAs, + Policy: g.Policy, + State: g.State, Type: g.Type, - Description: g.Description, - DependsOn: g.DependsOn, UserProperties: g.UserProperties, - AdditionalProperties: g.AdditionalProperties, } } @@ -12511,28 +12469,6 @@ type GlobalParameterSpecification struct { Value any } -// GlobalParametersClientCreateOrUpdateOptions contains the optional parameters for the GlobalParametersClient.CreateOrUpdate -// method. -type GlobalParametersClientCreateOrUpdateOptions struct { - // placeholder for future optional parameters -} - -// GlobalParametersClientDeleteOptions contains the optional parameters for the GlobalParametersClient.Delete method. -type GlobalParametersClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// GlobalParametersClientGetOptions contains the optional parameters for the GlobalParametersClient.Get method. -type GlobalParametersClientGetOptions struct { - // placeholder for future optional parameters -} - -// GlobalParametersClientListByFactoryOptions contains the optional parameters for the GlobalParametersClient.NewListByFactoryPager -// method. -type GlobalParametersClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - // GoogleAdWordsLinkedService - Google AdWords service linked service. type GoogleAdWordsLinkedService struct { // REQUIRED; Type of linked service. 
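// Editorial sketch, not part of the generated models: the new optional Activity fields
// introduced throughout this patch (State, OnInactiveMarkAs) let a pipeline carry
// deactivated activities whose run result is reported as the status chosen in
// OnInactiveMarkAs (Succeeded by default). A minimal construction sketch, assuming the
// generated enum constants ActivityStateInactive / ActivityOnInactiveMarkAsSucceeded and
// the ExecutePipelineActivityTypeProperties / PipelineReference shapes from the released
// package, none of which are visible in this hunk; the import path is shown without its
// major-version suffix.

package example

import (
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory"
)

func inactiveExecutePipeline() *armdatafactory.ExecutePipelineActivity {
	return &armdatafactory.ExecutePipelineActivity{
		Name: to.Ptr("RunChildPipeline"),
		// With State set to Inactive, the activity is reported with the
		// status selected below instead of being executed.
		State:            to.Ptr(armdatafactory.ActivityStateInactive),
		OnInactiveMarkAs: to.Ptr(armdatafactory.ActivityOnInactiveMarkAsSucceeded),
		TypeProperties: &armdatafactory.ExecutePipelineActivityTypeProperties{
			Pipeline: &armdatafactory.PipelineReference{
				ReferenceName: to.Ptr("childPipeline"),
				Type:          to.Ptr(armdatafactory.PipelineReferenceTypePipelineReference),
			},
		},
	}
}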
@@ -12560,12 +12496,12 @@ type GoogleAdWordsLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type GoogleAdWordsLinkedService. func (g *GoogleAdWordsLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + Annotations: g.Annotations, ConnectVia: g.ConnectVia, Description: g.Description, Parameters: g.Parameters, - Annotations: g.Annotations, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } @@ -12574,7 +12510,8 @@ type GoogleAdWordsLinkedServiceTypeProperties struct { // The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. AuthenticationType *GoogleAdWordsAuthenticationType - // The Client customer ID of the AdWords account that you want to fetch report data for. + // The Client customer ID of the AdWords account that you want to fetch report data for. Type: string (or Expression with + // resultType string). ClientCustomerID any // The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType @@ -12584,34 +12521,53 @@ type GoogleAdWordsLinkedServiceTypeProperties struct { // The client secret of the google application used to acquire the refresh token. ClientSecret SecretBaseClassification - // Properties used to connect to GoogleAds. It is mutually exclusive with any other properties in the linked service. Type: - // object. + // (Deprecated) Properties used to connect to GoogleAds. It is mutually exclusive with any other properties in the linked + // service. Type: object. ConnectionProperties any // The developer token associated with the manager account that you use to grant access to the AdWords API. DeveloperToken SecretBaseClassification - // The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. + // The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. Type: string + // (or Expression with resultType string). Email any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string - // The full path to the .p12 key file that is used to authenticate the service account email address and can only be used - // on self-hosted IR. + // The Google Ads API major version such as v14. The supported major versions could be found on https://developers.google.com/google-ads/api/docs/sunset-dates#timetable. + // Type: string (or Expression with + // resultType string). + GoogleAdsAPIVersion any + + // (Deprecated) The full path to the .p12 key file that is used to authenticate the service account email address and can + // only be used on self-hosted IR. Type: string (or Expression with resultType + // string). KeyFilePath any + // The customer ID of the Google Ads Manager account through which you want to fetch report data of specific Customer. Type: + // string (or Expression with resultType string). + LoginCustomerID any + + // The private key that is used to authenticate the service account email address and can only be used on self-hosted IR. + PrivateKey SecretBaseClassification + // The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. 
RefreshToken SecretBaseClassification - // The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This - // property can only be set when using SSL on self-hosted IR. The default value - // is the cacerts.pem file installed with the IR. + // Specifies whether to use the legacy data type mappings, which maps float, int32 and int64 from Google to string. Do not + // set this to true unless you want to keep backward compatibility with legacy + // driver's data type mappings. Type: boolean (or Expression with resultType boolean). + SupportLegacyDataTypes any + + // (Deprecated) The full path of the .pem file containing trusted CA certificates for verifying the server when connecting + // over SSL. This property can only be set when using SSL on self-hosted IR. The + // default value is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). TrustedCertPath any - // Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is - // false. + // (Deprecated) Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default + // value is false. Type: boolean (or Expression with resultType boolean). UseSystemTrustStore any } @@ -12652,15 +12608,15 @@ type GoogleAdWordsObjectDataset struct { // GetDataset implements the DatasetClassification interface for type GoogleAdWordsObjectDataset. func (g *GoogleAdWordsObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + Annotations: g.Annotations, Description: g.Description, - Structure: g.Structure, - Schema: g.Schema, + Folder: g.Folder, LinkedServiceName: g.LinkedServiceName, Parameters: g.Parameters, - Annotations: g.Annotations, - Folder: g.Folder, - AdditionalProperties: g.AdditionalProperties, + Schema: g.Schema, + Structure: g.Structure, + Type: g.Type, } } @@ -12698,26 +12654,26 @@ type GoogleAdWordsSource struct { // GetCopySource implements the CopySourceClassification interface for type GoogleAdWordsSource. func (g *GoogleAdWordsSource) GetCopySource() *CopySource { return &CopySource{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + DisableMetricsCollection: g.DisableMetricsCollection, + MaxConcurrentConnections: g.MaxConcurrentConnections, SourceRetryCount: g.SourceRetryCount, SourceRetryWait: g.SourceRetryWait, - MaxConcurrentConnections: g.MaxConcurrentConnections, - DisableMetricsCollection: g.DisableMetricsCollection, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type GoogleAdWordsSource. 
func (g *GoogleAdWordsSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: g.QueryTimeout, AdditionalColumns: g.AdditionalColumns, - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + DisableMetricsCollection: g.DisableMetricsCollection, + MaxConcurrentConnections: g.MaxConcurrentConnections, + QueryTimeout: g.QueryTimeout, SourceRetryCount: g.SourceRetryCount, SourceRetryWait: g.SourceRetryWait, - MaxConcurrentConnections: g.MaxConcurrentConnections, - DisableMetricsCollection: g.DisableMetricsCollection, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } @@ -12760,12 +12716,12 @@ type GoogleBigQueryLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type GoogleBigQueryLinkedService. func (g *GoogleBigQueryLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + Annotations: g.Annotations, ConnectVia: g.ConnectVia, Description: g.Description, Parameters: g.Parameters, - Annotations: g.Annotations, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } @@ -12775,10 +12731,10 @@ type GoogleBigQueryLinkedServiceTypeProperties struct { // IR. AuthenticationType *GoogleBigQueryAuthenticationType - // REQUIRED; The default BigQuery project to query against. + // REQUIRED; The default BigQuery project to query against. Type: string (or Expression with resultType string). Project any - // A comma-separated list of public BigQuery projects to access. + // A comma-separated list of public BigQuery projects to access. Type: string (or Expression with resultType string). AdditionalProjects any // The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType @@ -12788,31 +12744,33 @@ type GoogleBigQueryLinkedServiceTypeProperties struct { // The client secret of the google application used to acquire the refresh token. ClientSecret SecretBaseClassification - // The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. + // The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. Type: string + // (or Expression with resultType string). Email any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The full path to the .p12 key file that is used to authenticate the service account email address and can only be used - // on self-hosted IR. + // on self-hosted IR. Type: string (or Expression with resultType string). KeyFilePath any // The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. RefreshToken SecretBaseClassification // Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine - // BigQuery data with data from Google Drive. The default value is false. + // BigQuery data with data from Google Drive. The default value is false. Type: + // string (or Expression with resultType string). RequestGoogleDriveScope any // The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This // property can only be set when using SSL on self-hosted IR. 
The default value - // is the cacerts.pem file installed with the IR. + // is the cacerts.pem file installed with the IR. Type: string (or Expression with resultType string). TrustedCertPath any // Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is - // false. + // false.Type: boolean (or Expression with resultType boolean). UseSystemTrustStore any } @@ -12853,15 +12811,15 @@ type GoogleBigQueryObjectDataset struct { // GetDataset implements the DatasetClassification interface for type GoogleBigQueryObjectDataset. func (g *GoogleBigQueryObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + Annotations: g.Annotations, Description: g.Description, - Structure: g.Structure, - Schema: g.Schema, + Folder: g.Folder, LinkedServiceName: g.LinkedServiceName, Parameters: g.Parameters, - Annotations: g.Annotations, - Folder: g.Folder, - AdditionalProperties: g.AdditionalProperties, + Schema: g.Schema, + Structure: g.Structure, + Type: g.Type, } } @@ -12899,26 +12857,26 @@ type GoogleBigQuerySource struct { // GetCopySource implements the CopySourceClassification interface for type GoogleBigQuerySource. func (g *GoogleBigQuerySource) GetCopySource() *CopySource { return &CopySource{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + DisableMetricsCollection: g.DisableMetricsCollection, + MaxConcurrentConnections: g.MaxConcurrentConnections, SourceRetryCount: g.SourceRetryCount, SourceRetryWait: g.SourceRetryWait, - MaxConcurrentConnections: g.MaxConcurrentConnections, - DisableMetricsCollection: g.DisableMetricsCollection, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type GoogleBigQuerySource. func (g *GoogleBigQuerySource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: g.QueryTimeout, AdditionalColumns: g.AdditionalColumns, - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + DisableMetricsCollection: g.DisableMetricsCollection, + MaxConcurrentConnections: g.MaxConcurrentConnections, + QueryTimeout: g.QueryTimeout, SourceRetryCount: g.SourceRetryCount, SourceRetryWait: g.SourceRetryWait, - MaxConcurrentConnections: g.MaxConcurrentConnections, - DisableMetricsCollection: g.DisableMetricsCollection, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } @@ -12949,12 +12907,12 @@ type GoogleCloudStorageLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type GoogleCloudStorageLinkedService. func (g *GoogleCloudStorageLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + Annotations: g.Annotations, ConnectVia: g.ConnectVia, Description: g.Description, Parameters: g.Parameters, - Annotations: g.Annotations, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } @@ -12965,8 +12923,8 @@ type GoogleCloudStorageLinkedServiceTypeProperties struct { AccessKeyID any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. 
SecretAccessKey SecretBaseClassification @@ -13001,10 +12959,10 @@ type GoogleCloudStorageLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type GoogleCloudStorageLocation. func (g *GoogleCloudStorageLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: g.Type, - FolderPath: g.FolderPath, - FileName: g.FileName, AdditionalProperties: g.AdditionalProperties, + FileName: g.FileName, + FolderPath: g.FolderPath, + Type: g.Type, } } @@ -13023,8 +12981,8 @@ type GoogleCloudStorageReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -13059,10 +13017,10 @@ type GoogleCloudStorageReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type GoogleCloudStorageReadSettings. func (g *GoogleCloudStorageReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: g.Type, - MaxConcurrentConnections: g.MaxConcurrentConnections, - DisableMetricsCollection: g.DisableMetricsCollection, AdditionalProperties: g.AdditionalProperties, + DisableMetricsCollection: g.DisableMetricsCollection, + MaxConcurrentConnections: g.MaxConcurrentConnections, + Type: g.Type, } } @@ -13093,12 +13051,12 @@ type GoogleSheetsLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type GoogleSheetsLinkedService. func (g *GoogleSheetsLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + Annotations: g.Annotations, ConnectVia: g.ConnectVia, Description: g.Description, Parameters: g.Parameters, - Annotations: g.Annotations, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } @@ -13108,8 +13066,8 @@ type GoogleSheetsLinkedServiceTypeProperties struct { APIToken SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string } // GreenplumDatasetTypeProperties - Greenplum Dataset Properties @@ -13151,12 +13109,12 @@ type GreenplumLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type GreenplumLinkedService. func (g *GreenplumLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + Annotations: g.Annotations, ConnectVia: g.ConnectVia, Description: g.Description, Parameters: g.Parameters, - Annotations: g.Annotations, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } @@ -13166,8 +13124,8 @@ type GreenplumLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). 
- EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Pwd *AzureKeyVaultSecretReference @@ -13207,26 +13165,26 @@ type GreenplumSource struct { // GetCopySource implements the CopySourceClassification interface for type GreenplumSource. func (g *GreenplumSource) GetCopySource() *CopySource { return &CopySource{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + DisableMetricsCollection: g.DisableMetricsCollection, + MaxConcurrentConnections: g.MaxConcurrentConnections, SourceRetryCount: g.SourceRetryCount, SourceRetryWait: g.SourceRetryWait, - MaxConcurrentConnections: g.MaxConcurrentConnections, - DisableMetricsCollection: g.DisableMetricsCollection, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type GreenplumSource. func (g *GreenplumSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: g.QueryTimeout, AdditionalColumns: g.AdditionalColumns, - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + DisableMetricsCollection: g.DisableMetricsCollection, + MaxConcurrentConnections: g.MaxConcurrentConnections, + QueryTimeout: g.QueryTimeout, SourceRetryCount: g.SourceRetryCount, SourceRetryWait: g.SourceRetryWait, - MaxConcurrentConnections: g.MaxConcurrentConnections, - DisableMetricsCollection: g.DisableMetricsCollection, - AdditionalProperties: g.AdditionalProperties, + Type: g.Type, } } @@ -13267,15 +13225,15 @@ type GreenplumTableDataset struct { // GetDataset implements the DatasetClassification interface for type GreenplumTableDataset. func (g *GreenplumTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: g.Type, + AdditionalProperties: g.AdditionalProperties, + Annotations: g.Annotations, Description: g.Description, - Structure: g.Structure, - Schema: g.Schema, + Folder: g.Folder, LinkedServiceName: g.LinkedServiceName, Parameters: g.Parameters, - Annotations: g.Annotations, - Folder: g.Folder, - AdditionalProperties: g.AdditionalProperties, + Schema: g.Schema, + Structure: g.Structure, + Type: g.Type, } } @@ -13306,12 +13264,12 @@ type HBaseLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type HBaseLinkedService. func (h *HBaseLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, ConnectVia: h.ConnectVia, Description: h.Description, Parameters: h.Parameters, - Annotations: h.Annotations, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -13334,8 +13292,8 @@ type HBaseLinkedServiceTypeProperties struct { EnableSSL any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version) HTTPPath any @@ -13392,15 +13350,15 @@ type HBaseObjectDataset struct { // GetDataset implements the DatasetClassification interface for type HBaseObjectDataset. 
func (h *HBaseObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, Description: h.Description, - Structure: h.Structure, - Schema: h.Schema, + Folder: h.Folder, LinkedServiceName: h.LinkedServiceName, Parameters: h.Parameters, - Annotations: h.Annotations, - Folder: h.Folder, - AdditionalProperties: h.AdditionalProperties, + Schema: h.Schema, + Structure: h.Structure, + Type: h.Type, } } @@ -13438,26 +13396,26 @@ type HBaseSource struct { // GetCopySource implements the CopySourceClassification interface for type HBaseSource. func (h *HBaseSource) GetCopySource() *CopySource { return &CopySource{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, SourceRetryCount: h.SourceRetryCount, SourceRetryWait: h.SourceRetryWait, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type HBaseSource. func (h *HBaseSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: h.QueryTimeout, AdditionalColumns: h.AdditionalColumns, - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, + QueryTimeout: h.QueryTimeout, SourceRetryCount: h.SourceRetryCount, SourceRetryWait: h.SourceRetryWait, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -13484,9 +13442,16 @@ type HDInsightHiveActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -13494,26 +13459,30 @@ type HDInsightHiveActivity struct { // GetActivity implements the ActivityClassification interface for type HDInsightHiveActivity. func (h *HDInsightHiveActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type HDInsightHiveActivity. 
func (h *HDInsightHiveActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, LinkedServiceName: h.LinkedServiceName, - Policy: h.Policy, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + Policy: h.Policy, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } @@ -13541,7 +13510,7 @@ type HDInsightHiveActivityTypeProperties struct { StorageLinkedServices []*LinkedServiceReference // User specified arguments under hivevar namespace. - Variables []any + Variables map[string]any } // HDInsightLinkedService - HDInsight linked service. @@ -13571,12 +13540,12 @@ type HDInsightLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type HDInsightLinkedService. func (h *HDInsightLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, ConnectVia: h.ConnectVia, Description: h.Description, Parameters: h.Parameters, - Annotations: h.Annotations, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -13586,8 +13555,8 @@ type HDInsightLinkedServiceTypeProperties struct { ClusterURI any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType // string). @@ -13632,9 +13601,16 @@ type HDInsightMapReduceActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -13642,26 +13618,30 @@ type HDInsightMapReduceActivity struct { // GetActivity implements the ActivityClassification interface for type HDInsightMapReduceActivity. func (h *HDInsightMapReduceActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type HDInsightMapReduceActivity. 
func (h *HDInsightMapReduceActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, LinkedServiceName: h.LinkedServiceName, - Policy: h.Policy, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + Policy: h.Policy, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } @@ -13719,12 +13699,12 @@ type HDInsightOnDemandLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type HDInsightOnDemandLinkedService. func (h *HDInsightOnDemandLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, ConnectVia: h.ConnectVia, Description: h.Description, Parameters: h.Parameters, - Annotations: h.Annotations, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -13786,8 +13766,8 @@ type HDInsightOnDemandLinkedServiceTypeProperties struct { DataNodeSize any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. HBaseConfiguration any @@ -13865,9 +13845,16 @@ type HDInsightPigActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -13875,26 +13862,30 @@ type HDInsightPigActivity struct { // GetActivity implements the ActivityClassification interface for type HDInsightPigActivity. func (h *HDInsightPigActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type HDInsightPigActivity. func (h *HDInsightPigActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, LinkedServiceName: h.LinkedServiceName, - Policy: h.Policy, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + Policy: h.Policy, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } @@ -13942,9 +13933,16 @@ type HDInsightSparkActivity struct { // Linked service reference. 
LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -13952,26 +13950,30 @@ type HDInsightSparkActivity struct { // GetActivity implements the ActivityClassification interface for type HDInsightSparkActivity. func (h *HDInsightSparkActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type HDInsightSparkActivity. func (h *HDInsightSparkActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, LinkedServiceName: h.LinkedServiceName, - Policy: h.Policy, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + Policy: h.Policy, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } @@ -14027,9 +14029,16 @@ type HDInsightStreamingActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -14037,26 +14046,30 @@ type HDInsightStreamingActivity struct { // GetActivity implements the ActivityClassification interface for type HDInsightStreamingActivity. func (h *HDInsightStreamingActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type HDInsightStreamingActivity. 
func (h *HDInsightStreamingActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: h.AdditionalProperties, + DependsOn: h.DependsOn, + Description: h.Description, LinkedServiceName: h.LinkedServiceName, - Policy: h.Policy, Name: h.Name, + OnInactiveMarkAs: h.OnInactiveMarkAs, + Policy: h.Policy, + State: h.State, Type: h.Type, - Description: h.Description, - DependsOn: h.DependsOn, UserProperties: h.UserProperties, - AdditionalProperties: h.AdditionalProperties, } } @@ -14136,15 +14149,15 @@ type HTTPDataset struct { // GetDataset implements the DatasetClassification interface for type HTTPDataset. func (h *HTTPDataset) GetDataset() *Dataset { return &Dataset{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, Description: h.Description, - Structure: h.Structure, - Schema: h.Schema, + Folder: h.Folder, LinkedServiceName: h.LinkedServiceName, Parameters: h.Parameters, - Annotations: h.Annotations, - Folder: h.Folder, - AdditionalProperties: h.AdditionalProperties, + Schema: h.Schema, + Structure: h.Structure, + Type: h.Type, } } @@ -14198,12 +14211,12 @@ type HTTPLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type HTTPLinkedService. func (h *HTTPLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, ConnectVia: h.ConnectVia, Description: h.Description, Parameters: h.Parameters, - Annotations: h.Annotations, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -14235,8 +14248,8 @@ type HTTPLinkedServiceTypeProperties struct { EnableServerCertificateValidation any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. Password SecretBaseClassification @@ -14245,11 +14258,15 @@ type HTTPLinkedServiceTypeProperties struct { UserName any } -// HTTPReadSettings - Sftp read settings. +// HTTPReadSettings - Http read settings. type HTTPReadSettings struct { // REQUIRED; The read setting type. Type *string + // Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with + // resultType array of objects). + AdditionalColumns any + // The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). AdditionalHeaders any @@ -14259,32 +14276,27 @@ type HTTPReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool - // The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). MaxConcurrentConnections any - // Specify the root path where partition discovery starts from. Type: string (or Expression with resultType string). - PartitionRootPath any - // The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). RequestBody any // The HTTP method used to call the RESTful API. The default is GET. 
Type: string (or Expression with resultType string). RequestMethod any - // Specifies the timeout for a HTTP client to get HTTP response from HTTP server. + // Specifies the timeout for a HTTP client to get HTTP response from HTTP server. Type: string (or Expression with resultType + // string). RequestTimeout any } // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type HTTPReadSettings. func (h *HTTPReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: h.Type, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, + Type: h.Type, } } @@ -14309,10 +14321,10 @@ type HTTPServerLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type HTTPServerLocation. func (h *HTTPServerLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: h.Type, - FolderPath: h.FolderPath, - FileName: h.FileName, AdditionalProperties: h.AdditionalProperties, + FileName: h.FileName, + FolderPath: h.FolderPath, + Type: h.Type, } } @@ -14345,12 +14357,12 @@ type HTTPSource struct { // GetCopySource implements the CopySourceClassification interface for type HTTPSource. func (h *HTTPSource) GetCopySource() *CopySource { return &CopySource{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, SourceRetryCount: h.SourceRetryCount, SourceRetryWait: h.SourceRetryWait, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -14381,12 +14393,12 @@ type HdfsLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type HdfsLinkedService. func (h *HdfsLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, ConnectVia: h.ConnectVia, Description: h.Description, Parameters: h.Parameters, - Annotations: h.Annotations, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -14401,8 +14413,8 @@ type HdfsLinkedServiceTypeProperties struct { AuthenticationType any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password for Windows authentication. Password SecretBaseClassification @@ -14429,10 +14441,10 @@ type HdfsLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type HdfsLocation. func (h *HdfsLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: h.Type, - FolderPath: h.FolderPath, - FileName: h.FileName, AdditionalProperties: h.AdditionalProperties, + FileName: h.FileName, + FolderPath: h.FolderPath, + Type: h.Type, } } @@ -14454,8 +14466,8 @@ type HdfsReadSettings struct { // Specifies Distcp-related settings. DistcpSettings *DistcpSettings - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. 
Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -14487,10 +14499,10 @@ type HdfsReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type HdfsReadSettings. func (h *HdfsReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: h.Type, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, + Type: h.Type, } } @@ -14525,12 +14537,12 @@ type HdfsSource struct { // GetCopySource implements the CopySourceClassification interface for type HdfsSource. func (h *HdfsSource) GetCopySource() *CopySource { return &CopySource{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, SourceRetryCount: h.SourceRetryCount, SourceRetryWait: h.SourceRetryWait, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -14573,12 +14585,12 @@ type HiveLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type HiveLinkedService. func (h *HiveLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, ConnectVia: h.ConnectVia, Description: h.Description, Parameters: h.Parameters, - Annotations: h.Annotations, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -14602,8 +14614,8 @@ type HiveLinkedServiceTypeProperties struct { EnableSSL any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The partial URL corresponding to the Hive server. HTTPPath any @@ -14679,15 +14691,15 @@ type HiveObjectDataset struct { // GetDataset implements the DatasetClassification interface for type HiveObjectDataset. func (h *HiveObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, Description: h.Description, - Structure: h.Structure, - Schema: h.Schema, + Folder: h.Folder, LinkedServiceName: h.LinkedServiceName, Parameters: h.Parameters, - Annotations: h.Annotations, - Folder: h.Folder, - AdditionalProperties: h.AdditionalProperties, + Schema: h.Schema, + Structure: h.Structure, + Type: h.Type, } } @@ -14725,26 +14737,26 @@ type HiveSource struct { // GetCopySource implements the CopySourceClassification interface for type HiveSource. 
func (h *HiveSource) GetCopySource() *CopySource { return &CopySource{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, SourceRetryCount: h.SourceRetryCount, SourceRetryWait: h.SourceRetryWait, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type HiveSource. func (h *HiveSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: h.QueryTimeout, AdditionalColumns: h.AdditionalColumns, - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, + QueryTimeout: h.QueryTimeout, SourceRetryCount: h.SourceRetryCount, SourceRetryWait: h.SourceRetryWait, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -14775,12 +14787,12 @@ type HubspotLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type HubspotLinkedService. func (h *HubspotLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, ConnectVia: h.ConnectVia, Description: h.Description, Parameters: h.Parameters, - Annotations: h.Annotations, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -14796,8 +14808,8 @@ type HubspotLinkedServiceTypeProperties struct { ClientSecret SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The refresh token obtained when initially authenticating your OAuth integration. RefreshToken SecretBaseClassification @@ -14850,15 +14862,15 @@ type HubspotObjectDataset struct { // GetDataset implements the DatasetClassification interface for type HubspotObjectDataset. func (h *HubspotObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + Annotations: h.Annotations, Description: h.Description, - Structure: h.Structure, - Schema: h.Schema, + Folder: h.Folder, LinkedServiceName: h.LinkedServiceName, Parameters: h.Parameters, - Annotations: h.Annotations, - Folder: h.Folder, - AdditionalProperties: h.AdditionalProperties, + Schema: h.Schema, + Structure: h.Structure, + Type: h.Type, } } @@ -14896,26 +14908,26 @@ type HubspotSource struct { // GetCopySource implements the CopySourceClassification interface for type HubspotSource. 
func (h *HubspotSource) GetCopySource() *CopySource { return &CopySource{ - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, SourceRetryCount: h.SourceRetryCount, SourceRetryWait: h.SourceRetryWait, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type HubspotSource. func (h *HubspotSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: h.QueryTimeout, AdditionalColumns: h.AdditionalColumns, - Type: h.Type, + AdditionalProperties: h.AdditionalProperties, + DisableMetricsCollection: h.DisableMetricsCollection, + MaxConcurrentConnections: h.MaxConcurrentConnections, + QueryTimeout: h.QueryTimeout, SourceRetryCount: h.SourceRetryCount, SourceRetryWait: h.SourceRetryWait, - MaxConcurrentConnections: h.MaxConcurrentConnections, - DisableMetricsCollection: h.DisableMetricsCollection, - AdditionalProperties: h.AdditionalProperties, + Type: h.Type, } } @@ -14940,6 +14952,13 @@ type IfConditionActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -14947,24 +14966,28 @@ type IfConditionActivity struct { // GetActivity implements the ActivityClassification interface for type IfConditionActivity. func (i *IfConditionActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: i.AdditionalProperties, + DependsOn: i.DependsOn, + Description: i.Description, Name: i.Name, + OnInactiveMarkAs: i.OnInactiveMarkAs, + State: i.State, Type: i.Type, - Description: i.Description, - DependsOn: i.DependsOn, UserProperties: i.UserProperties, - AdditionalProperties: i.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type IfConditionActivity. func (i *IfConditionActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: i.AdditionalProperties, + DependsOn: i.DependsOn, + Description: i.Description, Name: i.Name, + OnInactiveMarkAs: i.OnInactiveMarkAs, + State: i.State, Type: i.Type, - Description: i.Description, - DependsOn: i.DependsOn, UserProperties: i.UserProperties, - AdditionalProperties: i.AdditionalProperties, } } @@ -15022,12 +15045,12 @@ type ImpalaLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type ImpalaLinkedService. func (i *ImpalaLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: i.Type, + AdditionalProperties: i.AdditionalProperties, + Annotations: i.Annotations, ConnectVia: i.ConnectVia, Description: i.Description, Parameters: i.Parameters, - Annotations: i.Annotations, - AdditionalProperties: i.AdditionalProperties, + Type: i.Type, } } @@ -15050,8 +15073,8 @@ type ImpalaLinkedServiceTypeProperties struct { EnableSSL any // The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password corresponding to the user name when using UsernameAndPassword. Password SecretBaseClassification @@ -15109,15 +15132,15 @@ type ImpalaObjectDataset struct { // GetDataset implements the DatasetClassification interface for type ImpalaObjectDataset. func (i *ImpalaObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: i.Type, + AdditionalProperties: i.AdditionalProperties, + Annotations: i.Annotations, Description: i.Description, - Structure: i.Structure, - Schema: i.Schema, + Folder: i.Folder, LinkedServiceName: i.LinkedServiceName, Parameters: i.Parameters, - Annotations: i.Annotations, - Folder: i.Folder, - AdditionalProperties: i.AdditionalProperties, + Schema: i.Schema, + Structure: i.Structure, + Type: i.Type, } } @@ -15155,38 +15178,29 @@ type ImpalaSource struct { // GetCopySource implements the CopySourceClassification interface for type ImpalaSource. func (i *ImpalaSource) GetCopySource() *CopySource { return &CopySource{ - Type: i.Type, + AdditionalProperties: i.AdditionalProperties, + DisableMetricsCollection: i.DisableMetricsCollection, + MaxConcurrentConnections: i.MaxConcurrentConnections, SourceRetryCount: i.SourceRetryCount, SourceRetryWait: i.SourceRetryWait, - MaxConcurrentConnections: i.MaxConcurrentConnections, - DisableMetricsCollection: i.DisableMetricsCollection, - AdditionalProperties: i.AdditionalProperties, + Type: i.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type ImpalaSource. func (i *ImpalaSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: i.QueryTimeout, AdditionalColumns: i.AdditionalColumns, - Type: i.Type, + AdditionalProperties: i.AdditionalProperties, + DisableMetricsCollection: i.DisableMetricsCollection, + MaxConcurrentConnections: i.MaxConcurrentConnections, + QueryTimeout: i.QueryTimeout, SourceRetryCount: i.SourceRetryCount, SourceRetryWait: i.SourceRetryWait, - MaxConcurrentConnections: i.MaxConcurrentConnections, - DisableMetricsCollection: i.DisableMetricsCollection, - AdditionalProperties: i.AdditionalProperties, + Type: i.Type, } } -// ImportSettingsClassification provides polymorphic access to related types. -// Call the interface's GetImportSettings() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AzureDatabricksDeltaLakeImportCommand, *ImportSettings, *SnowflakeImportCopyCommand -type ImportSettingsClassification interface { - // GetImportSettings returns the ImportSettings content of the underlying type. - GetImportSettings() *ImportSettings -} - // ImportSettings - Import command settings. type ImportSettings struct { // REQUIRED; The import setting type. @@ -15226,19 +15240,20 @@ type InformixLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type InformixLinkedService. func (i *InformixLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: i.Type, + AdditionalProperties: i.AdditionalProperties, + Annotations: i.Annotations, ConnectVia: i.ConnectVia, Description: i.Description, Parameters: i.Parameters, - Annotations: i.Annotations, - AdditionalProperties: i.AdditionalProperties, + Type: i.Type, } } // InformixLinkedServiceTypeProperties - Informix linked service properties. 
type InformixLinkedServiceTypeProperties struct { // REQUIRED; The non-access credential portion of the connection string as well as an optional encrypted credential. Type: - // string, SecureString or AzureKeyVaultSecretReference. + // string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType + // string. ConnectionString any // Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: @@ -15249,8 +15264,8 @@ type InformixLinkedServiceTypeProperties struct { Credential SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password for Basic authentication. Password SecretBaseClassification @@ -15292,14 +15307,14 @@ type InformixSink struct { // GetCopySink implements the CopySinkClassification interface for type InformixSink. func (i *InformixSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: i.AdditionalProperties, + DisableMetricsCollection: i.DisableMetricsCollection, + MaxConcurrentConnections: i.MaxConcurrentConnections, + SinkRetryCount: i.SinkRetryCount, + SinkRetryWait: i.SinkRetryWait, Type: i.Type, WriteBatchSize: i.WriteBatchSize, WriteBatchTimeout: i.WriteBatchTimeout, - SinkRetryCount: i.SinkRetryCount, - SinkRetryWait: i.SinkRetryWait, - MaxConcurrentConnections: i.MaxConcurrentConnections, - DisableMetricsCollection: i.DisableMetricsCollection, - AdditionalProperties: i.AdditionalProperties, } } @@ -15337,26 +15352,26 @@ type InformixSource struct { // GetCopySource implements the CopySourceClassification interface for type InformixSource. func (i *InformixSource) GetCopySource() *CopySource { return &CopySource{ - Type: i.Type, + AdditionalProperties: i.AdditionalProperties, + DisableMetricsCollection: i.DisableMetricsCollection, + MaxConcurrentConnections: i.MaxConcurrentConnections, SourceRetryCount: i.SourceRetryCount, SourceRetryWait: i.SourceRetryWait, - MaxConcurrentConnections: i.MaxConcurrentConnections, - DisableMetricsCollection: i.DisableMetricsCollection, - AdditionalProperties: i.AdditionalProperties, + Type: i.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type InformixSource. func (i *InformixSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: i.QueryTimeout, AdditionalColumns: i.AdditionalColumns, - Type: i.Type, + AdditionalProperties: i.AdditionalProperties, + DisableMetricsCollection: i.DisableMetricsCollection, + MaxConcurrentConnections: i.MaxConcurrentConnections, + QueryTimeout: i.QueryTimeout, SourceRetryCount: i.SourceRetryCount, SourceRetryWait: i.SourceRetryWait, - MaxConcurrentConnections: i.MaxConcurrentConnections, - DisableMetricsCollection: i.DisableMetricsCollection, - AdditionalProperties: i.AdditionalProperties, + Type: i.Type, } } @@ -15397,15 +15412,15 @@ type InformixTableDataset struct { // GetDataset implements the DatasetClassification interface for type InformixTableDataset. 
func (i *InformixTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: i.Type, + AdditionalProperties: i.AdditionalProperties, + Annotations: i.Annotations, Description: i.Description, - Structure: i.Structure, - Schema: i.Schema, + Folder: i.Folder, LinkedServiceName: i.LinkedServiceName, Parameters: i.Parameters, - Annotations: i.Annotations, - Folder: i.Folder, - AdditionalProperties: i.AdditionalProperties, + Schema: i.Schema, + Structure: i.Structure, + Type: i.Type, } } @@ -15415,15 +15430,6 @@ type InformixTableDatasetTypeProperties struct { TableName any } -// IntegrationRuntimeClassification provides polymorphic access to related types. -// Call the interface's GetIntegrationRuntime() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *IntegrationRuntime, *ManagedIntegrationRuntime, *SelfHostedIntegrationRuntime -type IntegrationRuntimeClassification interface { - // GetIntegrationRuntime returns the IntegrationRuntime content of the underlying type. - GetIntegrationRuntime() *IntegrationRuntime -} - // IntegrationRuntime - Azure Data Factory nested object which serves as a compute resource for activities. type IntegrationRuntime struct { // REQUIRED; Type of integration runtime. @@ -15533,10 +15539,21 @@ type IntegrationRuntimeDataFlowProperties struct { // Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. CoreCount *int32 + // Custom properties are used to tune the data flow runtime performance. + CustomProperties []*IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem + // Time to live (in minutes) setting of the cluster which will execute data flow job. TimeToLive *int32 } +type IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem struct { + // Name of custom property. + Name *string + + // Value of custom property. + Value *string +} + // IntegrationRuntimeDataProxyProperties - Data proxy properties for a managed dedicated integration runtime. type IntegrationRuntimeDataProxyProperties struct { // The self-hosted integration runtime reference. @@ -15612,43 +15629,6 @@ type IntegrationRuntimeNodeMonitoringData struct { SentBytes *float32 } -// IntegrationRuntimeNodesClientDeleteOptions contains the optional parameters for the IntegrationRuntimeNodesClient.Delete -// method. -type IntegrationRuntimeNodesClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimeNodesClientGetIPAddressOptions contains the optional parameters for the IntegrationRuntimeNodesClient.GetIPAddress -// method. -type IntegrationRuntimeNodesClientGetIPAddressOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimeNodesClientGetOptions contains the optional parameters for the IntegrationRuntimeNodesClient.Get method. -type IntegrationRuntimeNodesClientGetOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimeNodesClientUpdateOptions contains the optional parameters for the IntegrationRuntimeNodesClient.Update -// method. -type IntegrationRuntimeNodesClientUpdateOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimeObjectMetadataClientBeginRefreshOptions contains the optional parameters for the IntegrationRuntimeObjectMetadataClient.BeginRefresh -// method. -type IntegrationRuntimeObjectMetadataClientBeginRefreshOptions struct { - // Resumes the LRO from the provided token. 
- ResumeToken string -} - -// IntegrationRuntimeObjectMetadataClientGetOptions contains the optional parameters for the IntegrationRuntimeObjectMetadataClient.Get -// method. -type IntegrationRuntimeObjectMetadataClientGetOptions struct { - // The parameters for getting a SSIS object metadata. - GetMetadataRequest *GetSsisObjectMetadataRequest -} - // IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint - Azure-SSIS integration runtime outbound network dependency // endpoints for one category. type IntegrationRuntimeOutboundNetworkDependenciesCategoryEndpoint struct { @@ -15770,15 +15750,6 @@ type IntegrationRuntimeSsisProperties struct { PackageStores []*PackageStore } -// IntegrationRuntimeStatusClassification provides polymorphic access to related types. -// Call the interface's GetIntegrationRuntimeStatus() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *IntegrationRuntimeStatus, *ManagedIntegrationRuntimeStatus, *SelfHostedIntegrationRuntimeStatus -type IntegrationRuntimeStatusClassification interface { - // GetIntegrationRuntimeStatus returns the IntegrationRuntimeStatus content of the underlying type. - GetIntegrationRuntimeStatus() *IntegrationRuntimeStatus -} - // IntegrationRuntimeStatus - Integration runtime status. type IntegrationRuntimeStatus struct { // REQUIRED; Type of integration runtime. @@ -15811,121 +15782,17 @@ type IntegrationRuntimeVNetProperties struct { // OPTIONAL; Contains additional key/value pairs not defined in the schema. AdditionalProperties map[string]any - // Resource IDs of the public IP addresses that this integration runtime will use. - PublicIPs []*string - - // The name of the subnet this integration runtime will join. - Subnet *string - - // The ID of subnet, to which this Azure-SSIS integration runtime will be joined. - SubnetID *string - - // The ID of the VNet that this integration runtime will join. - VNetID *string -} - -// IntegrationRuntimesClientBeginStartOptions contains the optional parameters for the IntegrationRuntimesClient.BeginStart -// method. -type IntegrationRuntimesClientBeginStartOptions struct { - // Resumes the LRO from the provided token. - ResumeToken string -} - -// IntegrationRuntimesClientBeginStopOptions contains the optional parameters for the IntegrationRuntimesClient.BeginStop -// method. -type IntegrationRuntimesClientBeginStopOptions struct { - // Resumes the LRO from the provided token. - ResumeToken string -} - -// IntegrationRuntimesClientCreateLinkedIntegrationRuntimeOptions contains the optional parameters for the IntegrationRuntimesClient.CreateLinkedIntegrationRuntime -// method. -type IntegrationRuntimesClientCreateLinkedIntegrationRuntimeOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimesClientCreateOrUpdateOptions contains the optional parameters for the IntegrationRuntimesClient.CreateOrUpdate -// method. -type IntegrationRuntimesClientCreateOrUpdateOptions struct { - // ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity - // or can be * for unconditional update. - IfMatch *string -} - -// IntegrationRuntimesClientDeleteOptions contains the optional parameters for the IntegrationRuntimesClient.Delete method. 
-type IntegrationRuntimesClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimesClientGetConnectionInfoOptions contains the optional parameters for the IntegrationRuntimesClient.GetConnectionInfo -// method. -type IntegrationRuntimesClientGetConnectionInfoOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimesClientGetMonitoringDataOptions contains the optional parameters for the IntegrationRuntimesClient.GetMonitoringData -// method. -type IntegrationRuntimesClientGetMonitoringDataOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimesClientGetOptions contains the optional parameters for the IntegrationRuntimesClient.Get method. -type IntegrationRuntimesClientGetOptions struct { - // ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, - // or if * was provided, then no content will be returned. - IfNoneMatch *string -} - -// IntegrationRuntimesClientGetStatusOptions contains the optional parameters for the IntegrationRuntimesClient.GetStatus -// method. -type IntegrationRuntimesClientGetStatusOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimesClientListAuthKeysOptions contains the optional parameters for the IntegrationRuntimesClient.ListAuthKeys -// method. -type IntegrationRuntimesClientListAuthKeysOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimesClientListByFactoryOptions contains the optional parameters for the IntegrationRuntimesClient.NewListByFactoryPager -// method. -type IntegrationRuntimesClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsOptions contains the optional parameters for the IntegrationRuntimesClient.ListOutboundNetworkDependenciesEndpoints -// method. -type IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimesClientRegenerateAuthKeyOptions contains the optional parameters for the IntegrationRuntimesClient.RegenerateAuthKey -// method. -type IntegrationRuntimesClientRegenerateAuthKeyOptions struct { - // placeholder for future optional parameters -} - -// IntegrationRuntimesClientRemoveLinksOptions contains the optional parameters for the IntegrationRuntimesClient.RemoveLinks -// method. -type IntegrationRuntimesClientRemoveLinksOptions struct { - // placeholder for future optional parameters -} + // Resource IDs of the public IP addresses that this integration runtime will use. + PublicIPs []*string -// IntegrationRuntimesClientSyncCredentialsOptions contains the optional parameters for the IntegrationRuntimesClient.SyncCredentials -// method. -type IntegrationRuntimesClientSyncCredentialsOptions struct { - // placeholder for future optional parameters -} + // The name of the subnet this integration runtime will join. + Subnet *string -// IntegrationRuntimesClientUpdateOptions contains the optional parameters for the IntegrationRuntimesClient.Update method. -type IntegrationRuntimesClientUpdateOptions struct { - // placeholder for future optional parameters -} + // The ID of subnet, to which this Azure-SSIS integration runtime will be joined. 
+ SubnetID *string -// IntegrationRuntimesClientUpgradeOptions contains the optional parameters for the IntegrationRuntimesClient.Upgrade method. -type IntegrationRuntimesClientUpgradeOptions struct { - // placeholder for future optional parameters + // The ID of the VNet that this integration runtime will join. + VNetID *string } // JSONDataset - Json dataset. @@ -15965,15 +15832,15 @@ type JSONDataset struct { // GetDataset implements the DatasetClassification interface for type JSONDataset. func (j *JSONDataset) GetDataset() *Dataset { return &Dataset{ - Type: j.Type, + AdditionalProperties: j.AdditionalProperties, + Annotations: j.Annotations, Description: j.Description, - Structure: j.Structure, - Schema: j.Schema, + Folder: j.Folder, LinkedServiceName: j.LinkedServiceName, Parameters: j.Parameters, - Annotations: j.Annotations, - Folder: j.Folder, - AdditionalProperties: j.AdditionalProperties, + Schema: j.Schema, + Structure: j.Structure, + Type: j.Type, } } @@ -16034,10 +15901,10 @@ type JSONFormat struct { // GetDatasetStorageFormat implements the DatasetStorageFormatClassification interface for type JSONFormat. func (j *JSONFormat) GetDatasetStorageFormat() *DatasetStorageFormat { return &DatasetStorageFormat{ - Type: j.Type, - Serializer: j.Serializer, - Deserializer: j.Deserializer, AdditionalProperties: j.AdditionalProperties, + Deserializer: j.Deserializer, + Serializer: j.Serializer, + Type: j.Type, } } @@ -16056,8 +15923,8 @@ type JSONReadSettings struct { // GetFormatReadSettings implements the FormatReadSettingsClassification interface for type JSONReadSettings. func (j *JSONReadSettings) GetFormatReadSettings() *FormatReadSettings { return &FormatReadSettings{ - Type: j.Type, AdditionalProperties: j.AdditionalProperties, + Type: j.Type, } } @@ -16097,14 +15964,14 @@ type JSONSink struct { // GetCopySink implements the CopySinkClassification interface for type JSONSink. func (j *JSONSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: j.AdditionalProperties, + DisableMetricsCollection: j.DisableMetricsCollection, + MaxConcurrentConnections: j.MaxConcurrentConnections, + SinkRetryCount: j.SinkRetryCount, + SinkRetryWait: j.SinkRetryWait, Type: j.Type, WriteBatchSize: j.WriteBatchSize, WriteBatchTimeout: j.WriteBatchTimeout, - SinkRetryCount: j.SinkRetryCount, - SinkRetryWait: j.SinkRetryWait, - MaxConcurrentConnections: j.MaxConcurrentConnections, - DisableMetricsCollection: j.DisableMetricsCollection, - AdditionalProperties: j.AdditionalProperties, } } @@ -16142,12 +16009,12 @@ type JSONSource struct { // GetCopySource implements the CopySourceClassification interface for type JSONSource. func (j *JSONSource) GetCopySource() *CopySource { return &CopySource{ - Type: j.Type, + AdditionalProperties: j.AdditionalProperties, + DisableMetricsCollection: j.DisableMetricsCollection, + MaxConcurrentConnections: j.MaxConcurrentConnections, SourceRetryCount: j.SourceRetryCount, SourceRetryWait: j.SourceRetryWait, - MaxConcurrentConnections: j.MaxConcurrentConnections, - DisableMetricsCollection: j.DisableMetricsCollection, - AdditionalProperties: j.AdditionalProperties, + Type: j.Type, } } @@ -16167,8 +16034,8 @@ type JSONWriteSettings struct { // GetFormatWriteSettings implements the FormatWriteSettingsClassification interface for type JSONWriteSettings. 
func (j *JSONWriteSettings) GetFormatWriteSettings() *FormatWriteSettings { return &FormatWriteSettings{ - Type: j.Type, AdditionalProperties: j.AdditionalProperties, + Type: j.Type, } } @@ -16199,12 +16066,12 @@ type JiraLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type JiraLinkedService. func (j *JiraLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: j.Type, + AdditionalProperties: j.AdditionalProperties, + Annotations: j.Annotations, ConnectVia: j.ConnectVia, Description: j.Description, Parameters: j.Parameters, - Annotations: j.Annotations, - AdditionalProperties: j.AdditionalProperties, + Type: j.Type, } } @@ -16217,8 +16084,8 @@ type JiraLinkedServiceTypeProperties struct { Username any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password corresponding to the user name that you provided in the username field. Password SecretBaseClassification @@ -16275,15 +16142,15 @@ type JiraObjectDataset struct { // GetDataset implements the DatasetClassification interface for type JiraObjectDataset. func (j *JiraObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: j.Type, + AdditionalProperties: j.AdditionalProperties, + Annotations: j.Annotations, Description: j.Description, - Structure: j.Structure, - Schema: j.Schema, + Folder: j.Folder, LinkedServiceName: j.LinkedServiceName, Parameters: j.Parameters, - Annotations: j.Annotations, - Folder: j.Folder, - AdditionalProperties: j.AdditionalProperties, + Schema: j.Schema, + Structure: j.Structure, + Type: j.Type, } } @@ -16321,26 +16188,26 @@ type JiraSource struct { // GetCopySource implements the CopySourceClassification interface for type JiraSource. func (j *JiraSource) GetCopySource() *CopySource { return &CopySource{ - Type: j.Type, + AdditionalProperties: j.AdditionalProperties, + DisableMetricsCollection: j.DisableMetricsCollection, + MaxConcurrentConnections: j.MaxConcurrentConnections, SourceRetryCount: j.SourceRetryCount, SourceRetryWait: j.SourceRetryWait, - MaxConcurrentConnections: j.MaxConcurrentConnections, - DisableMetricsCollection: j.DisableMetricsCollection, - AdditionalProperties: j.AdditionalProperties, + Type: j.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type JiraSource. func (j *JiraSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: j.QueryTimeout, AdditionalColumns: j.AdditionalColumns, - Type: j.Type, + AdditionalProperties: j.AdditionalProperties, + DisableMetricsCollection: j.DisableMetricsCollection, + MaxConcurrentConnections: j.MaxConcurrentConnections, + QueryTimeout: j.QueryTimeout, SourceRetryCount: j.SourceRetryCount, SourceRetryWait: j.SourceRetryWait, - MaxConcurrentConnections: j.MaxConcurrentConnections, - DisableMetricsCollection: j.DisableMetricsCollection, - AdditionalProperties: j.AdditionalProperties, + Type: j.Type, } } @@ -16412,15 +16279,6 @@ type LinkedIntegrationRuntimeRequest struct { LinkedFactoryName *string } -// LinkedIntegrationRuntimeTypeClassification provides polymorphic access to related types. -// Call the interface's GetLinkedIntegrationRuntimeType() method to access the common type. -// Use a type switch to determine the concrete type. 
The possible types are: -// - *LinkedIntegrationRuntimeKeyAuthorization, *LinkedIntegrationRuntimeRbacAuthorization, *LinkedIntegrationRuntimeType -type LinkedIntegrationRuntimeTypeClassification interface { - // GetLinkedIntegrationRuntimeType returns the LinkedIntegrationRuntimeType content of the underlying type. - GetLinkedIntegrationRuntimeType() *LinkedIntegrationRuntimeType -} - // LinkedIntegrationRuntimeType - The base definition of a linked integration runtime. type LinkedIntegrationRuntimeType struct { // REQUIRED; The authorization type for integration runtime sharing. @@ -16432,38 +16290,6 @@ func (l *LinkedIntegrationRuntimeType) GetLinkedIntegrationRuntimeType() *Linked return l } -// LinkedServiceClassification provides polymorphic access to related types. -// Call the interface's GetLinkedService() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AmazonMWSLinkedService, *AmazonRdsForOracleLinkedService, *AmazonRdsForSQLServerLinkedService, *AmazonRedshiftLinkedService, -// - *AmazonS3CompatibleLinkedService, *AmazonS3LinkedService, *AppFiguresLinkedService, *AsanaLinkedService, *AzureBatchLinkedService, -// - *AzureBlobFSLinkedService, *AzureBlobStorageLinkedService, *AzureDataExplorerLinkedService, *AzureDataLakeAnalyticsLinkedService, -// - *AzureDataLakeStoreLinkedService, *AzureDatabricksDeltaLakeLinkedService, *AzureDatabricksLinkedService, *AzureFileStorageLinkedService, -// - *AzureFunctionLinkedService, *AzureKeyVaultLinkedService, *AzureMLLinkedService, *AzureMLServiceLinkedService, *AzureMariaDBLinkedService, -// - *AzureMySQLLinkedService, *AzurePostgreSQLLinkedService, *AzureSQLDWLinkedService, *AzureSQLDatabaseLinkedService, *AzureSQLMILinkedService, -// - *AzureSearchLinkedService, *AzureStorageLinkedService, *AzureSynapseArtifactsLinkedService, *AzureTableStorageLinkedService, -// - *CassandraLinkedService, *CommonDataServiceForAppsLinkedService, *ConcurLinkedService, *CosmosDbLinkedService, *CosmosDbMongoDbAPILinkedService, -// - *CouchbaseLinkedService, *CustomDataSourceLinkedService, *DataworldLinkedService, *Db2LinkedService, *DrillLinkedService, -// - *DynamicsAXLinkedService, *DynamicsCrmLinkedService, *DynamicsLinkedService, *EloquaLinkedService, *FileServerLinkedService, -// - *FtpServerLinkedService, *GoogleAdWordsLinkedService, *GoogleBigQueryLinkedService, *GoogleCloudStorageLinkedService, -// - *GoogleSheetsLinkedService, *GreenplumLinkedService, *HBaseLinkedService, *HDInsightLinkedService, *HDInsightOnDemandLinkedService, -// - *HTTPLinkedService, *HdfsLinkedService, *HiveLinkedService, *HubspotLinkedService, *ImpalaLinkedService, *InformixLinkedService, -// - *JiraLinkedService, *LinkedService, *MagentoLinkedService, *MariaDBLinkedService, *MarketoLinkedService, *MicrosoftAccessLinkedService, -// - *MongoDbAtlasLinkedService, *MongoDbLinkedService, *MongoDbV2LinkedService, *MySQLLinkedService, *NetezzaLinkedService, -// - *ODataLinkedService, *OdbcLinkedService, *Office365LinkedService, *OracleCloudStorageLinkedService, *OracleLinkedService, -// - *OracleServiceCloudLinkedService, *PaypalLinkedService, *PhoenixLinkedService, *PostgreSQLLinkedService, *PrestoLinkedService, -// - *QuickBooksLinkedService, *QuickbaseLinkedService, *ResponsysLinkedService, *RestServiceLinkedService, *SQLServerLinkedService, -// - *SalesforceLinkedService, *SalesforceMarketingCloudLinkedService, *SalesforceServiceCloudLinkedService, *SapBWLinkedService, -// - *SapCloudForCustomerLinkedService, 
*SapEccLinkedService, *SapHanaLinkedService, *SapOdpLinkedService, *SapOpenHubLinkedService, -// - *SapTableLinkedService, *ServiceNowLinkedService, *SftpServerLinkedService, *SharePointOnlineListLinkedService, *ShopifyLinkedService, -// - *SmartsheetLinkedService, *SnowflakeLinkedService, *SparkLinkedService, *SquareLinkedService, *SybaseLinkedService, *TeamDeskLinkedService, -// - *TeradataLinkedService, *TwilioLinkedService, *VerticaLinkedService, *WebLinkedService, *XeroLinkedService, *ZendeskLinkedService, -// - *ZohoLinkedService -type LinkedServiceClassification interface { - // GetLinkedService returns the LinkedService content of the underlying type. - GetLinkedService() *LinkedService -} - // LinkedService - The nested object which contains the information and credential which can be used to connect with related // store or compute resource. type LinkedService struct { @@ -16537,32 +16363,6 @@ type LinkedServiceResource struct { Type *string } -// LinkedServicesClientCreateOrUpdateOptions contains the optional parameters for the LinkedServicesClient.CreateOrUpdate -// method. -type LinkedServicesClientCreateOrUpdateOptions struct { - // ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can - // be * for unconditional update. - IfMatch *string -} - -// LinkedServicesClientDeleteOptions contains the optional parameters for the LinkedServicesClient.Delete method. -type LinkedServicesClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// LinkedServicesClientGetOptions contains the optional parameters for the LinkedServicesClient.Get method. -type LinkedServicesClientGetOptions struct { - // ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if - // * was provided, then no content will be returned. - IfNoneMatch *string -} - -// LinkedServicesClientListByFactoryOptions contains the optional parameters for the LinkedServicesClient.NewListByFactoryPager -// method. -type LinkedServicesClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - // LogLocationSettings - Log location settings. type LogLocationSettings struct { // REQUIRED; Log storage linked service reference. @@ -16625,9 +16425,16 @@ type LookupActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -16635,26 +16442,30 @@ type LookupActivity struct { // GetActivity implements the ActivityClassification interface for type LookupActivity. 
func (l *LookupActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: l.AdditionalProperties, + DependsOn: l.DependsOn, + Description: l.Description, Name: l.Name, + OnInactiveMarkAs: l.OnInactiveMarkAs, + State: l.State, Type: l.Type, - Description: l.Description, - DependsOn: l.DependsOn, UserProperties: l.UserProperties, - AdditionalProperties: l.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type LookupActivity. func (l *LookupActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: l.AdditionalProperties, + DependsOn: l.DependsOn, + Description: l.Description, LinkedServiceName: l.LinkedServiceName, - Policy: l.Policy, Name: l.Name, + OnInactiveMarkAs: l.OnInactiveMarkAs, + Policy: l.Policy, + State: l.State, Type: l.Type, - Description: l.Description, - DependsOn: l.DependsOn, UserProperties: l.UserProperties, - AdditionalProperties: l.AdditionalProperties, } } @@ -16697,12 +16508,12 @@ type MagentoLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type MagentoLinkedService. func (m *MagentoLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, ConnectVia: m.ConnectVia, Description: m.Description, Parameters: m.Parameters, - Annotations: m.Annotations, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -16715,8 +16526,8 @@ type MagentoLinkedServiceTypeProperties struct { AccessToken SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. UseEncryptedEndpoints any @@ -16766,15 +16577,15 @@ type MagentoObjectDataset struct { // GetDataset implements the DatasetClassification interface for type MagentoObjectDataset. func (m *MagentoObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, Description: m.Description, - Structure: m.Structure, - Schema: m.Schema, + Folder: m.Folder, LinkedServiceName: m.LinkedServiceName, Parameters: m.Parameters, - Annotations: m.Annotations, - Folder: m.Folder, - AdditionalProperties: m.AdditionalProperties, + Schema: m.Schema, + Structure: m.Structure, + Type: m.Type, } } @@ -16812,26 +16623,26 @@ type MagentoSource struct { // GetCopySource implements the CopySourceClassification interface for type MagentoSource. func (m *MagentoSource) GetCopySource() *CopySource { return &CopySource{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type MagentoSource. 
func (m *MagentoSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: m.QueryTimeout, AdditionalColumns: m.AdditionalColumns, - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, + QueryTimeout: m.QueryTimeout, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -16856,10 +16667,10 @@ type ManagedIdentityCredential struct { // GetCredential implements the CredentialClassification interface for type ManagedIdentityCredential. func (m *ManagedIdentityCredential) GetCredential() *Credential { return &Credential{ - Type: m.Type, - Description: m.Description, - Annotations: m.Annotations, AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, + Description: m.Description, + Type: m.Type, } } @@ -16911,9 +16722,9 @@ type ManagedIntegrationRuntime struct { // GetIntegrationRuntime implements the IntegrationRuntimeClassification interface for type ManagedIntegrationRuntime. func (m *ManagedIntegrationRuntime) GetIntegrationRuntime() *IntegrationRuntime { return &IntegrationRuntime{ - Type: m.Type, - Description: m.Description, AdditionalProperties: m.AdditionalProperties, + Description: m.Description, + Type: m.Type, } } @@ -16995,10 +16806,10 @@ type ManagedIntegrationRuntimeStatus struct { // GetIntegrationRuntimeStatus implements the IntegrationRuntimeStatusClassification interface for type ManagedIntegrationRuntimeStatus. func (m *ManagedIntegrationRuntimeStatus) GetIntegrationRuntimeStatus() *IntegrationRuntimeStatus { return &IntegrationRuntimeStatus{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, DataFactoryName: m.DataFactoryName, State: m.State, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -17080,33 +16891,6 @@ type ManagedPrivateEndpointResource struct { Type *string } -// ManagedPrivateEndpointsClientCreateOrUpdateOptions contains the optional parameters for the ManagedPrivateEndpointsClient.CreateOrUpdate -// method. -type ManagedPrivateEndpointsClientCreateOrUpdateOptions struct { - // ETag of the managed private endpoint entity. Should only be specified for update, for which it should match existing entity - // or can be * for unconditional update. - IfMatch *string -} - -// ManagedPrivateEndpointsClientDeleteOptions contains the optional parameters for the ManagedPrivateEndpointsClient.Delete -// method. -type ManagedPrivateEndpointsClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// ManagedPrivateEndpointsClientGetOptions contains the optional parameters for the ManagedPrivateEndpointsClient.Get method. -type ManagedPrivateEndpointsClientGetOptions struct { - // ETag of the managed private endpoint entity. Should only be specified for get. If the ETag matches the existing entity - // tag, or if * was provided, then no content will be returned. - IfNoneMatch *string -} - -// ManagedPrivateEndpointsClientListByFactoryOptions contains the optional parameters for the ManagedPrivateEndpointsClient.NewListByFactoryPager -// method. 
-type ManagedPrivateEndpointsClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - // ManagedVirtualNetwork - A managed Virtual Network associated with the Azure Data Factory type ManagedVirtualNetwork struct { // OPTIONAL; Contains additional key/value pairs not defined in the schema. @@ -17155,25 +16939,148 @@ type ManagedVirtualNetworkResource struct { Type *string } -// ManagedVirtualNetworksClientCreateOrUpdateOptions contains the optional parameters for the ManagedVirtualNetworksClient.CreateOrUpdate -// method. -type ManagedVirtualNetworksClientCreateOrUpdateOptions struct { - // ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity - // or can be * for unconditional update. - IfMatch *string +// MapperAttributeMapping - Source and target column mapping details. +type MapperAttributeMapping struct { + // Reference of the source column used in the mapping. It is used for 'Direct' mapping type only. + AttributeReference *MapperAttributeReference + + // List of references for source columns. It is used for 'Derived' and 'Aggregate' type mappings only. + AttributeReferences []*MapperAttributeReference + + // Expression used for 'Aggregate' and 'Derived' type mapping. + Expression *string + + // Name of the function used for 'Aggregate' and 'Derived' (except 'Advanced') type mapping. + FunctionName *string + + // Name of the target column. + Name *string + + // Type of the CDC attribute mapping. Note: 'Advanced' mapping type is also saved as 'Derived'. + Type *MappingType +} + +// MapperAttributeMappings - Attribute mapping details. +type MapperAttributeMappings struct { + // List of attribute mappings. + AttributeMappings []*MapperAttributeMapping +} + +// MapperAttributeReference - Attribute reference details for the referred column. +type MapperAttributeReference struct { + // Name of the table. + Entity *string + + // The connection reference for the connection. + EntityConnectionReference *MapperConnectionReference + + // Name of the column. + Name *string +} + +// MapperConnection - Source connection details. +type MapperConnection struct { + // REQUIRED; Type of connection via linked service or dataset. + Type *ConnectionType + + // List of name/value pairs for connection properties. + CommonDslConnectorProperties []*MapperDslConnectorProperties + + // A boolean indicating whether linked service is of type inline dataset. Currently only inline datasets are supported. + IsInlineDataset *bool + + // Linked service reference. + LinkedService *LinkedServiceReference + + // Type of the linked service e.g.: AzureBlobFS. + LinkedServiceType *string +} + +// MapperConnectionReference - Source or target connection reference details. +type MapperConnectionReference struct { + // Name of the connection + ConnectionName *string + + // Type of connection via linked service or dataset. + Type *ConnectionType +} + +// MapperDslConnectorProperties - Connector properties of a CDC table in terms of name / value pairs. +type MapperDslConnectorProperties struct { + // Name of the property. + Name *string + + // Value of the property. + Value any +} + +// MapperPolicy - CDC Policy. +type MapperPolicy struct { + // Mode of running the CDC: batch vs continuous. + Mode *string + + // Defines the frequency and interval for running the CDC for batch mode. + Recurrence *MapperPolicyRecurrence +} + +// MapperPolicyRecurrence - CDC policy recurrence details. 
+type MapperPolicyRecurrence struct { + // Frequency of period in terms of 'Hour', 'Minute' or 'Second'. + Frequency *FrequencyType + + // Actual interval value as per chosen frequency. + Interval *int32 +} + +// MapperSourceConnectionsInfo - A object which contains list of tables and connection details for a source connection. +type MapperSourceConnectionsInfo struct { + // Source connection details. + Connection *MapperConnection + + // List of source tables for a source connection. + SourceEntities []*MapperTable +} + +// MapperTable - CDC table details. +type MapperTable struct { + // Name of the table. + Name *string + + // Table properties. + Properties *MapperTableProperties +} + +// MapperTableProperties - Properties for a CDC table. +type MapperTableProperties struct { + // List of name/value pairs for connection properties. + DslConnectorProperties []*MapperDslConnectorProperties + + // List of columns for the source table. + Schema []*MapperTableSchema } -// ManagedVirtualNetworksClientGetOptions contains the optional parameters for the ManagedVirtualNetworksClient.Get method. -type ManagedVirtualNetworksClientGetOptions struct { - // ETag of the managed Virtual Network entity. Should only be specified for get. If the ETag matches the existing entity tag, - // or if * was provided, then no content will be returned. - IfNoneMatch *string +// MapperTableSchema - Schema of a CDC table in terms of column names and their corresponding data types. +type MapperTableSchema struct { + // Data type of the column. + DataType *string + + // Name of the column. + Name *string } -// ManagedVirtualNetworksClientListByFactoryOptions contains the optional parameters for the ManagedVirtualNetworksClient.NewListByFactoryPager -// method. -type ManagedVirtualNetworksClientListByFactoryOptions struct { - // placeholder for future optional parameters +// MapperTargetConnectionsInfo - A object which contains list of tables and connection details for a target connection. +type MapperTargetConnectionsInfo struct { + // Source connection details. + Connection *MapperConnection + + // List of table mappings. + DataMapperMappings []*DataMapperMapping + + // List of relationship info among the tables. + Relationships []any + + // List of source tables for a target connection. + TargetEntities []*MapperTable } // MappingDataFlow - Mapping data flow. @@ -17197,10 +17104,10 @@ type MappingDataFlow struct { // GetDataFlow implements the DataFlowClassification interface for type MappingDataFlow. func (m *MappingDataFlow) GetDataFlow() *DataFlow { return &DataFlow{ - Type: m.Type, - Description: m.Description, Annotations: m.Annotations, + Description: m.Description, Folder: m.Folder, + Type: m.Type, } } @@ -17249,12 +17156,12 @@ type MariaDBLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type MariaDBLinkedService. func (m *MariaDBLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, ConnectVia: m.ConnectVia, Description: m.Description, Parameters: m.Parameters, - Annotations: m.Annotations, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -17264,8 +17171,8 @@ type MariaDBLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). 
- EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Pwd *AzureKeyVaultSecretReference @@ -17305,26 +17212,26 @@ type MariaDBSource struct { // GetCopySource implements the CopySourceClassification interface for type MariaDBSource. func (m *MariaDBSource) GetCopySource() *CopySource { return &CopySource{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type MariaDBSource. func (m *MariaDBSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: m.QueryTimeout, AdditionalColumns: m.AdditionalColumns, - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, + QueryTimeout: m.QueryTimeout, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -17365,15 +17272,15 @@ type MariaDBTableDataset struct { // GetDataset implements the DatasetClassification interface for type MariaDBTableDataset. func (m *MariaDBTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, Description: m.Description, - Structure: m.Structure, - Schema: m.Schema, + Folder: m.Folder, LinkedServiceName: m.LinkedServiceName, Parameters: m.Parameters, - Annotations: m.Annotations, - Folder: m.Folder, - AdditionalProperties: m.AdditionalProperties, + Schema: m.Schema, + Structure: m.Structure, + Type: m.Type, } } @@ -17404,12 +17311,12 @@ type MarketoLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type MarketoLinkedService. func (m *MarketoLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, ConnectVia: m.ConnectVia, Description: m.Description, Parameters: m.Parameters, - Annotations: m.Annotations, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -17425,8 +17332,8 @@ type MarketoLinkedServiceTypeProperties struct { ClientSecret SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. UseEncryptedEndpoints any @@ -17476,15 +17383,15 @@ type MarketoObjectDataset struct { // GetDataset implements the DatasetClassification interface for type MarketoObjectDataset. 
func (m *MarketoObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, Description: m.Description, - Structure: m.Structure, - Schema: m.Schema, + Folder: m.Folder, LinkedServiceName: m.LinkedServiceName, Parameters: m.Parameters, - Annotations: m.Annotations, - Folder: m.Folder, - AdditionalProperties: m.AdditionalProperties, + Schema: m.Schema, + Structure: m.Structure, + Type: m.Type, } } @@ -17522,26 +17429,26 @@ type MarketoSource struct { // GetCopySource implements the CopySourceClassification interface for type MarketoSource. func (m *MarketoSource) GetCopySource() *CopySource { return &CopySource{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type MarketoSource. func (m *MarketoSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: m.QueryTimeout, AdditionalColumns: m.AdditionalColumns, - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, + QueryTimeout: m.QueryTimeout, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -17581,19 +17488,20 @@ type MicrosoftAccessLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type MicrosoftAccessLinkedService. func (m *MicrosoftAccessLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, ConnectVia: m.ConnectVia, Description: m.Description, Parameters: m.Parameters, - Annotations: m.Annotations, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } // MicrosoftAccessLinkedServiceTypeProperties - Microsoft Access linked service properties. type MicrosoftAccessLinkedServiceTypeProperties struct { // REQUIRED; The non-access credential portion of the connection string as well as an optional encrypted credential. Type: - // string, SecureString or AzureKeyVaultSecretReference. + // string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType + // string. ConnectionString any // Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. @@ -17604,8 +17512,8 @@ type MicrosoftAccessLinkedServiceTypeProperties struct { Credential SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password for Basic authentication. 
Password SecretBaseClassification @@ -17647,14 +17555,14 @@ type MicrosoftAccessSink struct { // GetCopySink implements the CopySinkClassification interface for type MicrosoftAccessSink. func (m *MicrosoftAccessSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, + SinkRetryCount: m.SinkRetryCount, + SinkRetryWait: m.SinkRetryWait, Type: m.Type, WriteBatchSize: m.WriteBatchSize, WriteBatchTimeout: m.WriteBatchTimeout, - SinkRetryCount: m.SinkRetryCount, - SinkRetryWait: m.SinkRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, } } @@ -17689,12 +17597,12 @@ type MicrosoftAccessSource struct { // GetCopySource implements the CopySourceClassification interface for type MicrosoftAccessSource. func (m *MicrosoftAccessSource) GetCopySource() *CopySource { return &CopySource{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -17735,15 +17643,15 @@ type MicrosoftAccessTableDataset struct { // GetDataset implements the DatasetClassification interface for type MicrosoftAccessTableDataset. func (m *MicrosoftAccessTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, Description: m.Description, - Structure: m.Structure, - Schema: m.Schema, + Folder: m.Folder, LinkedServiceName: m.LinkedServiceName, Parameters: m.Parameters, - Annotations: m.Annotations, - Folder: m.Folder, - AdditionalProperties: m.AdditionalProperties, + Schema: m.Schema, + Structure: m.Structure, + Type: m.Type, } } @@ -17790,15 +17698,15 @@ type MongoDbAtlasCollectionDataset struct { // GetDataset implements the DatasetClassification interface for type MongoDbAtlasCollectionDataset. func (m *MongoDbAtlasCollectionDataset) GetDataset() *Dataset { return &Dataset{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, Description: m.Description, - Structure: m.Structure, - Schema: m.Schema, + Folder: m.Folder, LinkedServiceName: m.LinkedServiceName, Parameters: m.Parameters, - Annotations: m.Annotations, - Folder: m.Folder, - AdditionalProperties: m.AdditionalProperties, + Schema: m.Schema, + Structure: m.Structure, + Type: m.Type, } } @@ -17835,12 +17743,12 @@ type MongoDbAtlasLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type MongoDbAtlasLinkedService. func (m *MongoDbAtlasLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, ConnectVia: m.ConnectVia, Description: m.Description, Parameters: m.Parameters, - Annotations: m.Annotations, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -17852,6 +17760,9 @@ type MongoDbAtlasLinkedServiceTypeProperties struct { // REQUIRED; The name of the MongoDB Atlas database that you want to access. 
Type: string (or Expression with resultType string). Database any + + // The driver version that you want to choose. Allowed value are v1 and v2. Type: string (or Expression with resultType string). + DriverVersion any } // MongoDbAtlasSink - A copy activity MongoDB Atlas sink. @@ -17889,14 +17800,14 @@ type MongoDbAtlasSink struct { // GetCopySink implements the CopySinkClassification interface for type MongoDbAtlasSink. func (m *MongoDbAtlasSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, + SinkRetryCount: m.SinkRetryCount, + SinkRetryWait: m.SinkRetryWait, Type: m.Type, WriteBatchSize: m.WriteBatchSize, WriteBatchTimeout: m.WriteBatchTimeout, - SinkRetryCount: m.SinkRetryCount, - SinkRetryWait: m.SinkRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, } } @@ -17943,12 +17854,12 @@ type MongoDbAtlasSource struct { // GetCopySource implements the CopySourceClassification interface for type MongoDbAtlasSource. func (m *MongoDbAtlasSource) GetCopySource() *CopySource { return &CopySource{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -17989,15 +17900,15 @@ type MongoDbCollectionDataset struct { // GetDataset implements the DatasetClassification interface for type MongoDbCollectionDataset. func (m *MongoDbCollectionDataset) GetDataset() *Dataset { return &Dataset{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, Description: m.Description, - Structure: m.Structure, - Schema: m.Schema, + Folder: m.Folder, LinkedServiceName: m.LinkedServiceName, Parameters: m.Parameters, - Annotations: m.Annotations, - Folder: m.Folder, - AdditionalProperties: m.AdditionalProperties, + Schema: m.Schema, + Structure: m.Structure, + Type: m.Type, } } @@ -18057,12 +17968,12 @@ type MongoDbLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type MongoDbLinkedService. func (m *MongoDbLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, ConnectVia: m.ConnectVia, Description: m.Description, Parameters: m.Parameters, - Annotations: m.Annotations, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -18089,8 +18000,8 @@ type MongoDbLinkedServiceTypeProperties struct { EnableSSL any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password for authentication. Password SecretBaseClassification @@ -18134,12 +18045,12 @@ type MongoDbSource struct { // GetCopySource implements the CopySourceClassification interface for type MongoDbSource. 
func (m *MongoDbSource) GetCopySource() *CopySource { return &CopySource{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -18180,15 +18091,15 @@ type MongoDbV2CollectionDataset struct { // GetDataset implements the DatasetClassification interface for type MongoDbV2CollectionDataset. func (m *MongoDbV2CollectionDataset) GetDataset() *Dataset { return &Dataset{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, Description: m.Description, - Structure: m.Structure, - Schema: m.Schema, + Folder: m.Folder, LinkedServiceName: m.LinkedServiceName, Parameters: m.Parameters, - Annotations: m.Annotations, - Folder: m.Folder, - AdditionalProperties: m.AdditionalProperties, + Schema: m.Schema, + Structure: m.Structure, + Type: m.Type, } } @@ -18225,12 +18136,12 @@ type MongoDbV2LinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type MongoDbV2LinkedService. func (m *MongoDbV2LinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, ConnectVia: m.ConnectVia, Description: m.Description, Parameters: m.Parameters, - Annotations: m.Annotations, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -18279,14 +18190,14 @@ type MongoDbV2Sink struct { // GetCopySink implements the CopySinkClassification interface for type MongoDbV2Sink. func (m *MongoDbV2Sink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, + SinkRetryCount: m.SinkRetryCount, + SinkRetryWait: m.SinkRetryWait, Type: m.Type, WriteBatchSize: m.WriteBatchSize, WriteBatchTimeout: m.WriteBatchTimeout, - SinkRetryCount: m.SinkRetryCount, - SinkRetryWait: m.SinkRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, } } @@ -18333,25 +18244,15 @@ type MongoDbV2Source struct { // GetCopySource implements the CopySourceClassification interface for type MongoDbV2Source. func (m *MongoDbV2Source) GetCopySource() *CopySource { return &CopySource{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } -// MultiplePipelineTriggerClassification provides polymorphic access to related types. -// Call the interface's GetMultiplePipelineTrigger() method to access the common type. -// Use a type switch to determine the concrete type. 
The possible types are: -// - *BlobEventsTrigger, *BlobTrigger, *CustomEventsTrigger, *MultiplePipelineTrigger, *ScheduleTrigger -type MultiplePipelineTriggerClassification interface { - TriggerClassification - // GetMultiplePipelineTrigger returns the MultiplePipelineTrigger content of the underlying type. - GetMultiplePipelineTrigger() *MultiplePipelineTrigger -} - // MultiplePipelineTrigger - Base class for all triggers that support one to many model for trigger to pipeline. type MultiplePipelineTrigger struct { // REQUIRED; Trigger type. @@ -18379,11 +18280,11 @@ func (m *MultiplePipelineTrigger) GetMultiplePipelineTrigger() *MultiplePipeline // GetTrigger implements the TriggerClassification interface for type MultiplePipelineTrigger. func (m *MultiplePipelineTrigger) GetTrigger() *Trigger { return &Trigger{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, Description: m.Description, RuntimeState: m.RuntimeState, - Annotations: m.Annotations, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -18414,23 +18315,23 @@ type MySQLLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type MySQLLinkedService. func (m *MySQLLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, ConnectVia: m.ConnectVia, Description: m.Description, Parameters: m.Parameters, - Annotations: m.Annotations, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } // MySQLLinkedServiceTypeProperties - MySQL linked service properties. type MySQLLinkedServiceTypeProperties struct { - // REQUIRED; The connection string. + // REQUIRED; The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password *AzureKeyVaultSecretReference @@ -18470,26 +18371,26 @@ type MySQLSource struct { // GetCopySource implements the CopySourceClassification interface for type MySQLSource. func (m *MySQLSource) GetCopySource() *CopySource { return &CopySource{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type MySQLSource. 
func (m *MySQLSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: m.QueryTimeout, AdditionalColumns: m.AdditionalColumns, - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + DisableMetricsCollection: m.DisableMetricsCollection, + MaxConcurrentConnections: m.MaxConcurrentConnections, + QueryTimeout: m.QueryTimeout, SourceRetryCount: m.SourceRetryCount, SourceRetryWait: m.SourceRetryWait, - MaxConcurrentConnections: m.MaxConcurrentConnections, - DisableMetricsCollection: m.DisableMetricsCollection, - AdditionalProperties: m.AdditionalProperties, + Type: m.Type, } } @@ -18530,15 +18431,15 @@ type MySQLTableDataset struct { // GetDataset implements the DatasetClassification interface for type MySQLTableDataset. func (m *MySQLTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: m.Type, + AdditionalProperties: m.AdditionalProperties, + Annotations: m.Annotations, Description: m.Description, - Structure: m.Structure, - Schema: m.Schema, + Folder: m.Folder, LinkedServiceName: m.LinkedServiceName, Parameters: m.Parameters, - Annotations: m.Annotations, - Folder: m.Folder, - AdditionalProperties: m.AdditionalProperties, + Schema: m.Schema, + Structure: m.Structure, + Type: m.Type, } } @@ -18575,12 +18476,12 @@ type NetezzaLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type NetezzaLinkedService. func (n *NetezzaLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: n.Type, + AdditionalProperties: n.AdditionalProperties, + Annotations: n.Annotations, ConnectVia: n.ConnectVia, Description: n.Description, Parameters: n.Parameters, - Annotations: n.Annotations, - AdditionalProperties: n.AdditionalProperties, + Type: n.Type, } } @@ -18590,8 +18491,8 @@ type NetezzaLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Pwd *AzureKeyVaultSecretReference @@ -18652,26 +18553,26 @@ type NetezzaSource struct { // GetCopySource implements the CopySourceClassification interface for type NetezzaSource. func (n *NetezzaSource) GetCopySource() *CopySource { return &CopySource{ - Type: n.Type, - SourceRetryCount: n.SourceRetryCount, - SourceRetryWait: n.SourceRetryWait, - MaxConcurrentConnections: n.MaxConcurrentConnections, - DisableMetricsCollection: n.DisableMetricsCollection, AdditionalProperties: n.AdditionalProperties, + DisableMetricsCollection: n.DisableMetricsCollection, + MaxConcurrentConnections: n.MaxConcurrentConnections, + SourceRetryCount: n.SourceRetryCount, + SourceRetryWait: n.SourceRetryWait, + Type: n.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type NetezzaSource. 
func (n *NetezzaSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: n.QueryTimeout, AdditionalColumns: n.AdditionalColumns, - Type: n.Type, + AdditionalProperties: n.AdditionalProperties, + DisableMetricsCollection: n.DisableMetricsCollection, + MaxConcurrentConnections: n.MaxConcurrentConnections, + QueryTimeout: n.QueryTimeout, SourceRetryCount: n.SourceRetryCount, SourceRetryWait: n.SourceRetryWait, - MaxConcurrentConnections: n.MaxConcurrentConnections, - DisableMetricsCollection: n.DisableMetricsCollection, - AdditionalProperties: n.AdditionalProperties, + Type: n.Type, } } @@ -18712,15 +18613,15 @@ type NetezzaTableDataset struct { // GetDataset implements the DatasetClassification interface for type NetezzaTableDataset. func (n *NetezzaTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: n.Type, + AdditionalProperties: n.AdditionalProperties, + Annotations: n.Annotations, Description: n.Description, - Structure: n.Structure, - Schema: n.Schema, + Folder: n.Folder, LinkedServiceName: n.LinkedServiceName, Parameters: n.Parameters, - Annotations: n.Annotations, - Folder: n.Folder, - AdditionalProperties: n.AdditionalProperties, + Schema: n.Schema, + Structure: n.Structure, + Type: n.Type, } } @@ -18772,12 +18673,12 @@ type ODataLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type ODataLinkedService. func (o *ODataLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, ConnectVia: o.ConnectVia, Description: o.Description, Parameters: o.Parameters, - Annotations: o.Annotations, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -18805,8 +18706,8 @@ type ODataLinkedServiceTypeProperties struct { AzureCloudType any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password of the OData service. Password SecretBaseClassification @@ -18872,15 +18773,15 @@ type ODataResourceDataset struct { // GetDataset implements the DatasetClassification interface for type ODataResourceDataset. func (o *ODataResourceDataset) GetDataset() *Dataset { return &Dataset{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, Description: o.Description, - Structure: o.Structure, - Schema: o.Schema, + Folder: o.Folder, LinkedServiceName: o.LinkedServiceName, Parameters: o.Parameters, - Annotations: o.Annotations, - Folder: o.Folder, - AdditionalProperties: o.AdditionalProperties, + Schema: o.Schema, + Structure: o.Structure, + Type: o.Type, } } @@ -18926,12 +18827,12 @@ type ODataSource struct { // GetCopySource implements the CopySourceClassification interface for type ODataSource. 
func (o *ODataSource) GetCopySource() *CopySource { return &CopySource{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, SourceRetryCount: o.SourceRetryCount, SourceRetryWait: o.SourceRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -18962,19 +18863,20 @@ type OdbcLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type OdbcLinkedService. func (o *OdbcLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, ConnectVia: o.ConnectVia, Description: o.Description, Parameters: o.Parameters, - Annotations: o.Annotations, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } // OdbcLinkedServiceTypeProperties - ODBC linked service properties. type OdbcLinkedServiceTypeProperties struct { // REQUIRED; The non-access credential portion of the connection string as well as an optional encrypted credential. Type: - // string, SecureString or AzureKeyVaultSecretReference. + // string, or SecureString, or AzureKeyVaultSecretReference, or Expression with resultType + // string. ConnectionString any // Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or @@ -18985,8 +18887,8 @@ type OdbcLinkedServiceTypeProperties struct { Credential SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password for Basic authentication. Password SecretBaseClassification @@ -19028,14 +18930,14 @@ type OdbcSink struct { // GetCopySink implements the CopySinkClassification interface for type OdbcSink. func (o *OdbcSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, + SinkRetryCount: o.SinkRetryCount, + SinkRetryWait: o.SinkRetryWait, Type: o.Type, WriteBatchSize: o.WriteBatchSize, WriteBatchTimeout: o.WriteBatchTimeout, - SinkRetryCount: o.SinkRetryCount, - SinkRetryWait: o.SinkRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, } } @@ -19073,26 +18975,26 @@ type OdbcSource struct { // GetCopySource implements the CopySourceClassification interface for type OdbcSource. func (o *OdbcSource) GetCopySource() *CopySource { return &CopySource{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, SourceRetryCount: o.SourceRetryCount, SourceRetryWait: o.SourceRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type OdbcSource. 
func (o *OdbcSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: o.QueryTimeout, AdditionalColumns: o.AdditionalColumns, - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, + QueryTimeout: o.QueryTimeout, SourceRetryCount: o.SourceRetryCount, SourceRetryWait: o.SourceRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -19133,15 +19035,15 @@ type OdbcTableDataset struct { // GetDataset implements the DatasetClassification interface for type OdbcTableDataset. func (o *OdbcTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, Description: o.Description, - Structure: o.Structure, - Schema: o.Schema, + Folder: o.Folder, LinkedServiceName: o.LinkedServiceName, Parameters: o.Parameters, - Annotations: o.Annotations, - Folder: o.Folder, - AdditionalProperties: o.AdditionalProperties, + Schema: o.Schema, + Structure: o.Structure, + Type: o.Type, } } @@ -19188,15 +19090,15 @@ type Office365Dataset struct { // GetDataset implements the DatasetClassification interface for type Office365Dataset. func (o *Office365Dataset) GetDataset() *Dataset { return &Dataset{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, Description: o.Description, - Structure: o.Structure, - Schema: o.Schema, + Folder: o.Folder, LinkedServiceName: o.LinkedServiceName, Parameters: o.Parameters, - Annotations: o.Annotations, - Folder: o.Folder, - AdditionalProperties: o.AdditionalProperties, + Schema: o.Schema, + Structure: o.Structure, + Type: o.Type, } } @@ -19237,12 +19139,12 @@ type Office365LinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type Office365LinkedService. func (o *Office365LinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, ConnectVia: o.ConnectVia, Description: o.Description, Parameters: o.Parameters, - Annotations: o.Annotations, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -19262,8 +19164,8 @@ type Office365LinkedServiceTypeProperties struct { ServicePrincipalTenantID any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string } // Office365Source - A copy activity source for an Office 365 service. @@ -19290,7 +19192,8 @@ type Office365Source struct { MaxConcurrentConnections any // The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). - // Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] + // itemType: OutputColumn. Example: [ { "name": "Id" }, { "name": + // "CreatedDateTime" } ] OutputColumns any // Source retry count. Type: integer (or Expression with resultType integer). @@ -19309,12 +19212,12 @@ type Office365Source struct { // GetCopySource implements the CopySourceClassification interface for type Office365Source. 
func (o *Office365Source) GetCopySource() *CopySource { return &CopySource{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, SourceRetryCount: o.SourceRetryCount, SourceRetryWait: o.SourceRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -19438,11 +19341,6 @@ type OperationServiceSpecification struct { MetricSpecifications []*OperationMetricSpecification } -// OperationsClientListOptions contains the optional parameters for the OperationsClient.NewListPager method. -type OperationsClientListOptions struct { - // placeholder for future optional parameters -} - // OracleCloudStorageLinkedService - Linked service for Oracle Cloud Storage. type OracleCloudStorageLinkedService struct { // REQUIRED; Type of linked service. @@ -19470,12 +19368,12 @@ type OracleCloudStorageLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type OracleCloudStorageLinkedService. func (o *OracleCloudStorageLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, ConnectVia: o.ConnectVia, Description: o.Description, Parameters: o.Parameters, - Annotations: o.Annotations, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -19486,8 +19384,8 @@ type OracleCloudStorageLinkedServiceTypeProperties struct { AccessKeyID any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The secret access key of the Oracle Cloud Storage Identity and Access Management (IAM) user. SecretAccessKey SecretBaseClassification @@ -19522,10 +19420,10 @@ type OracleCloudStorageLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type OracleCloudStorageLocation. func (o *OracleCloudStorageLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: o.Type, - FolderPath: o.FolderPath, - FileName: o.FileName, AdditionalProperties: o.AdditionalProperties, + FileName: o.FileName, + FolderPath: o.FolderPath, + Type: o.Type, } } @@ -19544,8 +19442,8 @@ type OracleCloudStorageReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -19580,10 +19478,10 @@ type OracleCloudStorageReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type OracleCloudStorageReadSettings. 
func (o *OracleCloudStorageReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: o.Type, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, + Type: o.Type, } } @@ -19614,12 +19512,12 @@ type OracleLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type OracleLinkedService. func (o *OracleLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, ConnectVia: o.ConnectVia, Description: o.Description, Parameters: o.Parameters, - Annotations: o.Annotations, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -19629,8 +19527,8 @@ type OracleLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password *AzureKeyVaultSecretReference @@ -19681,12 +19579,12 @@ type OracleServiceCloudLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type OracleServiceCloudLinkedService. func (o *OracleServiceCloudLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, ConnectVia: o.ConnectVia, Description: o.Description, Parameters: o.Parameters, - Annotations: o.Annotations, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -19702,8 +19600,8 @@ type OracleServiceCloudLinkedServiceTypeProperties struct { Username any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression // with resultType boolean). @@ -19756,15 +19654,15 @@ type OracleServiceCloudObjectDataset struct { // GetDataset implements the DatasetClassification interface for type OracleServiceCloudObjectDataset. func (o *OracleServiceCloudObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, Description: o.Description, - Structure: o.Structure, - Schema: o.Schema, + Folder: o.Folder, LinkedServiceName: o.LinkedServiceName, Parameters: o.Parameters, - Annotations: o.Annotations, - Folder: o.Folder, - AdditionalProperties: o.AdditionalProperties, + Schema: o.Schema, + Structure: o.Structure, + Type: o.Type, } } @@ -19802,26 +19700,26 @@ type OracleServiceCloudSource struct { // GetCopySource implements the CopySourceClassification interface for type OracleServiceCloudSource. 
func (o *OracleServiceCloudSource) GetCopySource() *CopySource { return &CopySource{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, SourceRetryCount: o.SourceRetryCount, SourceRetryWait: o.SourceRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type OracleServiceCloudSource. func (o *OracleServiceCloudSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: o.QueryTimeout, AdditionalColumns: o.AdditionalColumns, - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, + QueryTimeout: o.QueryTimeout, SourceRetryCount: o.SourceRetryCount, SourceRetryWait: o.SourceRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -19858,14 +19756,14 @@ type OracleSink struct { // GetCopySink implements the CopySinkClassification interface for type OracleSink. func (o *OracleSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, + SinkRetryCount: o.SinkRetryCount, + SinkRetryWait: o.SinkRetryWait, Type: o.Type, WriteBatchSize: o.WriteBatchSize, WriteBatchTimeout: o.WriteBatchTimeout, - SinkRetryCount: o.SinkRetryCount, - SinkRetryWait: o.SinkRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, } } @@ -19910,12 +19808,12 @@ type OracleSource struct { // GetCopySource implements the CopySourceClassification interface for type OracleSource. func (o *OracleSource) GetCopySource() *CopySource { return &CopySource{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, SourceRetryCount: o.SourceRetryCount, SourceRetryWait: o.SourceRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -19956,15 +19854,15 @@ type OracleTableDataset struct { // GetDataset implements the DatasetClassification interface for type OracleTableDataset. func (o *OracleTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, Description: o.Description, - Structure: o.Structure, - Schema: o.Schema, + Folder: o.Folder, LinkedServiceName: o.LinkedServiceName, Parameters: o.Parameters, - Annotations: o.Annotations, - Folder: o.Folder, - AdditionalProperties: o.AdditionalProperties, + Schema: o.Schema, + Structure: o.Structure, + Type: o.Type, } } @@ -20017,15 +19915,15 @@ type OrcDataset struct { // GetDataset implements the DatasetClassification interface for type OrcDataset. 
func (o *OrcDataset) GetDataset() *Dataset { return &Dataset{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + Annotations: o.Annotations, Description: o.Description, - Structure: o.Structure, - Schema: o.Schema, + Folder: o.Folder, LinkedServiceName: o.LinkedServiceName, Parameters: o.Parameters, - Annotations: o.Annotations, - Folder: o.Folder, - AdditionalProperties: o.AdditionalProperties, + Schema: o.Schema, + Structure: o.Structure, + Type: o.Type, } } @@ -20056,10 +19954,10 @@ type OrcFormat struct { // GetDatasetStorageFormat implements the DatasetStorageFormatClassification interface for type OrcFormat. func (o *OrcFormat) GetDatasetStorageFormat() *DatasetStorageFormat { return &DatasetStorageFormat{ - Type: o.Type, - Serializer: o.Serializer, - Deserializer: o.Deserializer, AdditionalProperties: o.AdditionalProperties, + Deserializer: o.Deserializer, + Serializer: o.Serializer, + Type: o.Type, } } @@ -20099,14 +19997,14 @@ type OrcSink struct { // GetCopySink implements the CopySinkClassification interface for type OrcSink. func (o *OrcSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, + SinkRetryCount: o.SinkRetryCount, + SinkRetryWait: o.SinkRetryWait, Type: o.Type, WriteBatchSize: o.WriteBatchSize, WriteBatchTimeout: o.WriteBatchTimeout, - SinkRetryCount: o.SinkRetryCount, - SinkRetryWait: o.SinkRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, } } @@ -20141,12 +20039,12 @@ type OrcSource struct { // GetCopySource implements the CopySourceClassification interface for type OrcSource. func (o *OrcSource) GetCopySource() *CopySource { return &CopySource{ - Type: o.Type, + AdditionalProperties: o.AdditionalProperties, + DisableMetricsCollection: o.DisableMetricsCollection, + MaxConcurrentConnections: o.MaxConcurrentConnections, SourceRetryCount: o.SourceRetryCount, SourceRetryWait: o.SourceRetryWait, - MaxConcurrentConnections: o.MaxConcurrentConnections, - DisableMetricsCollection: o.DisableMetricsCollection, - AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -20170,8 +20068,8 @@ type OrcWriteSettings struct { // GetFormatWriteSettings implements the FormatWriteSettingsClassification interface for type OrcWriteSettings. func (o *OrcWriteSettings) GetFormatWriteSettings() *FormatWriteSettings { return &FormatWriteSettings{ - Type: o.Type, AdditionalProperties: o.AdditionalProperties, + Type: o.Type, } } @@ -20230,15 +20128,15 @@ type ParquetDataset struct { // GetDataset implements the DatasetClassification interface for type ParquetDataset. func (p *ParquetDataset) GetDataset() *Dataset { return &Dataset{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + Annotations: p.Annotations, Description: p.Description, - Structure: p.Structure, - Schema: p.Schema, + Folder: p.Folder, LinkedServiceName: p.LinkedServiceName, Parameters: p.Parameters, - Annotations: p.Annotations, - Folder: p.Folder, - AdditionalProperties: p.AdditionalProperties, + Schema: p.Schema, + Structure: p.Structure, + Type: p.Type, } } @@ -20269,10 +20167,30 @@ type ParquetFormat struct { // GetDatasetStorageFormat implements the DatasetStorageFormatClassification interface for type ParquetFormat. 
func (p *ParquetFormat) GetDatasetStorageFormat() *DatasetStorageFormat { return &DatasetStorageFormat{ - Type: p.Type, - Serializer: p.Serializer, + AdditionalProperties: p.AdditionalProperties, Deserializer: p.Deserializer, + Serializer: p.Serializer, + Type: p.Type, + } +} + +// ParquetReadSettings - Parquet read settings. +type ParquetReadSettings struct { + // REQUIRED; The read setting type. + Type *string + + // OPTIONAL; Contains additional key/value pairs not defined in the schema. + AdditionalProperties map[string]any + + // Compression settings. + CompressionProperties CompressionReadSettingsClassification +} + +// GetFormatReadSettings implements the FormatReadSettingsClassification interface for type ParquetReadSettings. +func (p *ParquetReadSettings) GetFormatReadSettings() *FormatReadSettings { + return &FormatReadSettings{ AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } @@ -20312,14 +20230,14 @@ type ParquetSink struct { // GetCopySink implements the CopySinkClassification interface for type ParquetSink. func (p *ParquetSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, + SinkRetryCount: p.SinkRetryCount, + SinkRetryWait: p.SinkRetryWait, Type: p.Type, WriteBatchSize: p.WriteBatchSize, WriteBatchTimeout: p.WriteBatchTimeout, - SinkRetryCount: p.SinkRetryCount, - SinkRetryWait: p.SinkRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, } } @@ -20338,6 +20256,9 @@ type ParquetSource struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any + // Parquet format settings. + FormatSettings *ParquetReadSettings + // The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). MaxConcurrentConnections any @@ -20354,12 +20275,12 @@ type ParquetSource struct { // GetCopySource implements the CopySourceClassification interface for type ParquetSource. func (p *ParquetSource) GetCopySource() *CopySource { return &CopySource{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, SourceRetryCount: p.SourceRetryCount, SourceRetryWait: p.SourceRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } @@ -20383,8 +20304,8 @@ type ParquetWriteSettings struct { // GetFormatWriteSettings implements the FormatWriteSettingsClassification interface for type ParquetWriteSettings. func (p *ParquetWriteSettings) GetFormatWriteSettings() *FormatWriteSettings { return &FormatWriteSettings{ - Type: p.Type, AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } @@ -20415,12 +20336,12 @@ type PaypalLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type PaypalLinkedService. 
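The new ParquetReadSettings type plugs into the new FormatSettings property on ParquetSource. A hedged sketch; the discriminator string and import path are assumptions rather than values taken from this change:

package main

import (
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // path/version assumed
)

func main() {
	src := armdatafactory.ParquetSource{
		// FormatSettings is the new hook for parquet read behaviour.
		FormatSettings: &armdatafactory.ParquetReadSettings{
			Type: to.Ptr("ParquetReadSettings"), // required read-setting type; literal assumed
			// CompressionProperties (CompressionReadSettingsClassification) is left nil here.
		},
	}
	_ = src
}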
func (p *PaypalLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + Annotations: p.Annotations, ConnectVia: p.ConnectVia, Description: p.Description, Parameters: p.Parameters, - Annotations: p.Annotations, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } @@ -20436,8 +20357,8 @@ type PaypalLinkedServiceTypeProperties struct { ClientSecret SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. UseEncryptedEndpoints any @@ -20487,15 +20408,15 @@ type PaypalObjectDataset struct { // GetDataset implements the DatasetClassification interface for type PaypalObjectDataset. func (p *PaypalObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + Annotations: p.Annotations, Description: p.Description, - Structure: p.Structure, - Schema: p.Schema, + Folder: p.Folder, LinkedServiceName: p.LinkedServiceName, Parameters: p.Parameters, - Annotations: p.Annotations, - Folder: p.Folder, - AdditionalProperties: p.AdditionalProperties, + Schema: p.Schema, + Structure: p.Structure, + Type: p.Type, } } @@ -20533,26 +20454,26 @@ type PaypalSource struct { // GetCopySource implements the CopySourceClassification interface for type PaypalSource. func (p *PaypalSource) GetCopySource() *CopySource { return &CopySource{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, SourceRetryCount: p.SourceRetryCount, SourceRetryWait: p.SourceRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type PaypalSource. func (p *PaypalSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: p.QueryTimeout, AdditionalColumns: p.AdditionalColumns, - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, + QueryTimeout: p.QueryTimeout, SourceRetryCount: p.SourceRetryCount, SourceRetryWait: p.SourceRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } @@ -20595,12 +20516,12 @@ type PhoenixLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type PhoenixLinkedService. func (p *PhoenixLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + Annotations: p.Annotations, ConnectVia: p.ConnectVia, Description: p.Description, Parameters: p.Parameters, - Annotations: p.Annotations, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } @@ -20623,8 +20544,8 @@ type PhoenixLinkedServiceTypeProperties struct { EnableSSL any // The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix // if using WindowsAzureHDInsightService. @@ -20686,15 +20607,15 @@ type PhoenixObjectDataset struct { // GetDataset implements the DatasetClassification interface for type PhoenixObjectDataset. func (p *PhoenixObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + Annotations: p.Annotations, Description: p.Description, - Structure: p.Structure, - Schema: p.Schema, + Folder: p.Folder, LinkedServiceName: p.LinkedServiceName, Parameters: p.Parameters, - Annotations: p.Annotations, - Folder: p.Folder, - AdditionalProperties: p.AdditionalProperties, + Schema: p.Schema, + Structure: p.Structure, + Type: p.Type, } } @@ -20732,26 +20653,26 @@ type PhoenixSource struct { // GetCopySource implements the CopySourceClassification interface for type PhoenixSource. func (p *PhoenixSource) GetCopySource() *CopySource { return &CopySource{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, SourceRetryCount: p.SourceRetryCount, SourceRetryWait: p.SourceRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type PhoenixSource. func (p *PhoenixSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: p.QueryTimeout, AdditionalColumns: p.AdditionalColumns, - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, + QueryTimeout: p.QueryTimeout, SourceRetryCount: p.SourceRetryCount, SourceRetryWait: p.SourceRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } @@ -20796,6 +20717,12 @@ type PipelineExternalComputeScaleProperties struct { // OPTIONAL; Contains additional key/value pairs not defined in the schema. AdditionalProperties map[string]any + // Number of the the external nodes, which should be greater than 0 and less than 11. + NumberOfExternalNodes *int32 + + // Number of the pipeline nodes, which should be greater than 0 and less than 11. + NumberOfPipelineNodes *int32 + // Time to live (in minutes) setting of integration runtime which will execute pipeline and external activity. TimeToLive *int32 } @@ -20917,22 +20844,6 @@ type PipelineRunInvokedBy struct { PipelineRunID *string } -// PipelineRunsClientCancelOptions contains the optional parameters for the PipelineRunsClient.Cancel method. -type PipelineRunsClientCancelOptions struct { - // If true, cancel all the Child pipelines that are triggered by the current pipeline. - IsRecursive *bool -} - -// PipelineRunsClientGetOptions contains the optional parameters for the PipelineRunsClient.Get method. 
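NumberOfExternalNodes and NumberOfPipelineNodes join TimeToLive on PipelineExternalComputeScaleProperties. A small sketch that stays inside the documented 1-10 range; the `to` helper and import path are assumed:

package main

import (
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // path/version assumed
)

func main() {
	scale := armdatafactory.PipelineExternalComputeScaleProperties{
		TimeToLive:            to.Ptr[int32](60), // minutes
		NumberOfPipelineNodes: to.Ptr[int32](1),  // documented as greater than 0 and less than 11
		NumberOfExternalNodes: to.Ptr[int32](2),  // documented as greater than 0 and less than 11
	}
	_ = scale
}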
-type PipelineRunsClientGetOptions struct { - // placeholder for future optional parameters -} - -// PipelineRunsClientQueryByFactoryOptions contains the optional parameters for the PipelineRunsClient.QueryByFactory method. -type PipelineRunsClientQueryByFactoryOptions struct { - // placeholder for future optional parameters -} - // PipelineRunsQueryResponse - A list pipeline runs. type PipelineRunsQueryResponse struct { // REQUIRED; List of pipeline runs. @@ -20942,46 +20853,6 @@ type PipelineRunsQueryResponse struct { ContinuationToken *string } -// PipelinesClientCreateOrUpdateOptions contains the optional parameters for the PipelinesClient.CreateOrUpdate method. -type PipelinesClientCreateOrUpdateOptions struct { - // ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * - // for unconditional update. - IfMatch *string -} - -// PipelinesClientCreateRunOptions contains the optional parameters for the PipelinesClient.CreateRun method. -type PipelinesClientCreateRunOptions struct { - // Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped - // under the same groupId. - IsRecovery *bool - // Parameters of the pipeline run. These parameters will be used only if the runId is not specified. - Parameters map[string]any - // The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. - ReferencePipelineRunID *string - // In recovery mode, the rerun will start from this activity. If not specified, all activities will run. - StartActivityName *string - // In recovery mode, if set to true, the rerun will start from failed activities. The property will be used only if startActivityName - // is not specified. - StartFromFailure *bool -} - -// PipelinesClientDeleteOptions contains the optional parameters for the PipelinesClient.Delete method. -type PipelinesClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// PipelinesClientGetOptions contains the optional parameters for the PipelinesClient.Get method. -type PipelinesClientGetOptions struct { - // ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was - // provided, then no content will be returned. - IfNoneMatch *string -} - -// PipelinesClientListByFactoryOptions contains the optional parameters for the PipelinesClient.NewListByFactoryPager method. -type PipelinesClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - // PolybaseSettings - PolyBase settings. type PolybaseSettings struct { // OPTIONAL; Contains additional key/value pairs not defined in the schema. @@ -21030,23 +20901,23 @@ type PostgreSQLLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type PostgreSQLLinkedService. func (p *PostgreSQLLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + Annotations: p.Annotations, ConnectVia: p.ConnectVia, Description: p.Description, Parameters: p.Parameters, - Annotations: p.Annotations, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } // PostgreSQLLinkedServiceTypeProperties - PostgreSQL linked service properties. type PostgreSQLLinkedServiceTypeProperties struct { - // REQUIRED; The connection string. + // REQUIRED; The connection string. 
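The pipeline run and pipelines client option structs removed from this file (presumably relocated by the same generation pass rather than dropped) still describe how a run is created. A hedged sketch of a CreateRun call built from those fields; the client factory, credential type, and method signature follow the SDK's usual shape and are assumptions, not read from this change:

package main

import (
	"context"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // path/version assumed
)

func main() {
	cred, err := azidentity.NewDefaultAzureCredential(nil)
	if err != nil {
		log.Fatal(err)
	}
	clientFactory, err := armdatafactory.NewClientFactory("<subscription-id>", cred, nil)
	if err != nil {
		log.Fatal(err)
	}
	// Option fields mirror the doc comments above; resource names are placeholders.
	_, err = clientFactory.NewPipelinesClient().CreateRun(context.Background(),
		"exampleResourceGroup", "exampleFactory", "examplePipeline",
		&armdatafactory.PipelinesClientCreateRunOptions{
			Parameters: map[string]any{"outputPath": "container/folder"},
			IsRecovery: to.Ptr(false),
		})
	if err != nil {
		log.Fatal(err)
	}
}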
Type: string, SecureString or AzureKeyVaultSecretReference. ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password *AzureKeyVaultSecretReference @@ -21086,26 +20957,26 @@ type PostgreSQLSource struct { // GetCopySource implements the CopySourceClassification interface for type PostgreSQLSource. func (p *PostgreSQLSource) GetCopySource() *CopySource { return &CopySource{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, SourceRetryCount: p.SourceRetryCount, SourceRetryWait: p.SourceRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type PostgreSQLSource. func (p *PostgreSQLSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: p.QueryTimeout, AdditionalColumns: p.AdditionalColumns, - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, + QueryTimeout: p.QueryTimeout, SourceRetryCount: p.SourceRetryCount, SourceRetryWait: p.SourceRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } @@ -21146,15 +21017,15 @@ type PostgreSQLTableDataset struct { // GetDataset implements the DatasetClassification interface for type PostgreSQLTableDataset. func (p *PostgreSQLTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + Annotations: p.Annotations, Description: p.Description, - Structure: p.Structure, - Schema: p.Schema, + Folder: p.Folder, LinkedServiceName: p.LinkedServiceName, Parameters: p.Parameters, - Annotations: p.Annotations, - Folder: p.Folder, - AdditionalProperties: p.AdditionalProperties, + Schema: p.Schema, + Structure: p.Structure, + Type: p.Type, } } @@ -21281,12 +21152,12 @@ type PrestoLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type PrestoLinkedService. func (p *PrestoLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + Annotations: p.Annotations, ConnectVia: p.ConnectVia, Description: p.Description, Parameters: p.Parameters, - Annotations: p.Annotations, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } @@ -21315,8 +21186,8 @@ type PrestoLinkedServiceTypeProperties struct { EnableSSL any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password corresponding to the user name. 
Password SecretBaseClassification @@ -21378,15 +21249,15 @@ type PrestoObjectDataset struct { // GetDataset implements the DatasetClassification interface for type PrestoObjectDataset. func (p *PrestoObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + Annotations: p.Annotations, Description: p.Description, - Structure: p.Structure, - Schema: p.Schema, + Folder: p.Folder, LinkedServiceName: p.LinkedServiceName, Parameters: p.Parameters, - Annotations: p.Annotations, - Folder: p.Folder, - AdditionalProperties: p.AdditionalProperties, + Schema: p.Schema, + Structure: p.Structure, + Type: p.Type, } } @@ -21424,63 +21295,35 @@ type PrestoSource struct { // GetCopySource implements the CopySourceClassification interface for type PrestoSource. func (p *PrestoSource) GetCopySource() *CopySource { return &CopySource{ - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, SourceRetryCount: p.SourceRetryCount, SourceRetryWait: p.SourceRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type PrestoSource. func (p *PrestoSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: p.QueryTimeout, AdditionalColumns: p.AdditionalColumns, - Type: p.Type, + AdditionalProperties: p.AdditionalProperties, + DisableMetricsCollection: p.DisableMetricsCollection, + MaxConcurrentConnections: p.MaxConcurrentConnections, + QueryTimeout: p.QueryTimeout, SourceRetryCount: p.SourceRetryCount, SourceRetryWait: p.SourceRetryWait, - MaxConcurrentConnections: p.MaxConcurrentConnections, - DisableMetricsCollection: p.DisableMetricsCollection, - AdditionalProperties: p.AdditionalProperties, + Type: p.Type, } } -// PrivateEndPointConnectionsClientListByFactoryOptions contains the optional parameters for the PrivateEndPointConnectionsClient.NewListByFactoryPager -// method. -type PrivateEndPointConnectionsClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - // PrivateEndpoint - Private endpoint which a connection belongs to. type PrivateEndpoint struct { // The resource Id for private endpoint ID *string } -// PrivateEndpointConnectionClientCreateOrUpdateOptions contains the optional parameters for the PrivateEndpointConnectionClient.CreateOrUpdate -// method. -type PrivateEndpointConnectionClientCreateOrUpdateOptions struct { - // ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing - // entity or can be * for unconditional update. - IfMatch *string -} - -// PrivateEndpointConnectionClientDeleteOptions contains the optional parameters for the PrivateEndpointConnectionClient.Delete -// method. -type PrivateEndpointConnectionClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// PrivateEndpointConnectionClientGetOptions contains the optional parameters for the PrivateEndpointConnectionClient.Get -// method. -type PrivateEndpointConnectionClientGetOptions struct { - // ETag of the private endpoint connection entity. Should only be specified for get. If the ETag matches the existing entity - // tag, or if * was provided, then no content will be returned. 
- IfNoneMatch *string -} - // PrivateEndpointConnectionListResponse - A list of linked service resources. type PrivateEndpointConnectionListResponse struct { // REQUIRED; List of Private Endpoint Connections. @@ -21577,11 +21420,6 @@ type PrivateLinkResourceProperties struct { RequiredZoneNames []*string } -// PrivateLinkResourcesClientGetOptions contains the optional parameters for the PrivateLinkResourcesClient.Get method. -type PrivateLinkResourcesClientGetOptions struct { - // placeholder for future optional parameters -} - // PrivateLinkResourcesWrapper - Wrapper for a collection of private link resources type PrivateLinkResourcesWrapper struct { // REQUIRED @@ -21630,12 +21468,12 @@ type QuickBooksLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type QuickBooksLinkedService. func (q *QuickBooksLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: q.Type, + AdditionalProperties: q.AdditionalProperties, + Annotations: q.Annotations, ConnectVia: q.ConnectVia, Description: q.Description, Parameters: q.Parameters, - Annotations: q.Annotations, - AdditionalProperties: q.AdditionalProperties, + Type: q.Type, } } @@ -21661,8 +21499,8 @@ type QuickBooksLinkedServiceTypeProperties struct { ConsumerSecret SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) Endpoint any @@ -21708,15 +21546,15 @@ type QuickBooksObjectDataset struct { // GetDataset implements the DatasetClassification interface for type QuickBooksObjectDataset. func (q *QuickBooksObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: q.Type, + AdditionalProperties: q.AdditionalProperties, + Annotations: q.Annotations, Description: q.Description, - Structure: q.Structure, - Schema: q.Schema, + Folder: q.Folder, LinkedServiceName: q.LinkedServiceName, Parameters: q.Parameters, - Annotations: q.Annotations, - Folder: q.Folder, - AdditionalProperties: q.AdditionalProperties, + Schema: q.Schema, + Structure: q.Structure, + Type: q.Type, } } @@ -21754,26 +21592,26 @@ type QuickBooksSource struct { // GetCopySource implements the CopySourceClassification interface for type QuickBooksSource. func (q *QuickBooksSource) GetCopySource() *CopySource { return &CopySource{ - Type: q.Type, + AdditionalProperties: q.AdditionalProperties, + DisableMetricsCollection: q.DisableMetricsCollection, + MaxConcurrentConnections: q.MaxConcurrentConnections, SourceRetryCount: q.SourceRetryCount, SourceRetryWait: q.SourceRetryWait, - MaxConcurrentConnections: q.MaxConcurrentConnections, - DisableMetricsCollection: q.DisableMetricsCollection, - AdditionalProperties: q.AdditionalProperties, + Type: q.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type QuickBooksSource. 
func (q *QuickBooksSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: q.QueryTimeout, AdditionalColumns: q.AdditionalColumns, - Type: q.Type, + AdditionalProperties: q.AdditionalProperties, + DisableMetricsCollection: q.DisableMetricsCollection, + MaxConcurrentConnections: q.MaxConcurrentConnections, + QueryTimeout: q.QueryTimeout, SourceRetryCount: q.SourceRetryCount, SourceRetryWait: q.SourceRetryWait, - MaxConcurrentConnections: q.MaxConcurrentConnections, - DisableMetricsCollection: q.DisableMetricsCollection, - AdditionalProperties: q.AdditionalProperties, + Type: q.Type, } } @@ -21804,12 +21642,12 @@ type QuickbaseLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type QuickbaseLinkedService. func (q *QuickbaseLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: q.Type, + AdditionalProperties: q.AdditionalProperties, + Annotations: q.Annotations, ConnectVia: q.ConnectVia, Description: q.Description, Parameters: q.Parameters, - Annotations: q.Annotations, - AdditionalProperties: q.AdditionalProperties, + Type: q.Type, } } @@ -21822,8 +21660,8 @@ type QuickbaseLinkedServiceTypeProperties struct { UserToken SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string } // RecurrenceSchedule - The recurrence schedule. @@ -21918,12 +21756,12 @@ type RelationalSource struct { // GetCopySource implements the CopySourceClassification interface for type RelationalSource. func (r *RelationalSource) GetCopySource() *CopySource { return &CopySource{ - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + DisableMetricsCollection: r.DisableMetricsCollection, + MaxConcurrentConnections: r.MaxConcurrentConnections, SourceRetryCount: r.SourceRetryCount, SourceRetryWait: r.SourceRetryWait, - MaxConcurrentConnections: r.MaxConcurrentConnections, - DisableMetricsCollection: r.DisableMetricsCollection, - AdditionalProperties: r.AdditionalProperties, + Type: r.Type, } } @@ -21964,15 +21802,15 @@ type RelationalTableDataset struct { // GetDataset implements the DatasetClassification interface for type RelationalTableDataset. func (r *RelationalTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + Annotations: r.Annotations, Description: r.Description, - Structure: r.Structure, - Schema: r.Schema, + Folder: r.Folder, LinkedServiceName: r.LinkedServiceName, Parameters: r.Parameters, - Annotations: r.Annotations, - Folder: r.Folder, - AdditionalProperties: r.AdditionalProperties, + Schema: r.Schema, + Structure: r.Structure, + Type: r.Type, } } @@ -22019,11 +21857,11 @@ type RerunTumblingWindowTrigger struct { // GetTrigger implements the TriggerClassification interface for type RerunTumblingWindowTrigger. func (r *RerunTumblingWindowTrigger) GetTrigger() *Trigger { return &Trigger{ - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + Annotations: r.Annotations, Description: r.Description, RuntimeState: r.RuntimeState, - Annotations: r.Annotations, - AdditionalProperties: r.AdditionalProperties, + Type: r.Type, } } @@ -22069,12 +21907,12 @@ type ResponsysLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type ResponsysLinkedService. 
func (r *ResponsysLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + Annotations: r.Annotations, ConnectVia: r.ConnectVia, Description: r.Description, Parameters: r.Parameters, - Annotations: r.Annotations, - AdditionalProperties: r.AdditionalProperties, + Type: r.Type, } } @@ -22090,8 +21928,8 @@ type ResponsysLinkedServiceTypeProperties struct { ClientSecret SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression // with resultType boolean). @@ -22144,15 +21982,15 @@ type ResponsysObjectDataset struct { // GetDataset implements the DatasetClassification interface for type ResponsysObjectDataset. func (r *ResponsysObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + Annotations: r.Annotations, Description: r.Description, - Structure: r.Structure, - Schema: r.Schema, + Folder: r.Folder, LinkedServiceName: r.LinkedServiceName, Parameters: r.Parameters, - Annotations: r.Annotations, - Folder: r.Folder, - AdditionalProperties: r.AdditionalProperties, + Schema: r.Schema, + Structure: r.Structure, + Type: r.Type, } } @@ -22190,26 +22028,26 @@ type ResponsysSource struct { // GetCopySource implements the CopySourceClassification interface for type ResponsysSource. func (r *ResponsysSource) GetCopySource() *CopySource { return &CopySource{ - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + DisableMetricsCollection: r.DisableMetricsCollection, + MaxConcurrentConnections: r.MaxConcurrentConnections, SourceRetryCount: r.SourceRetryCount, SourceRetryWait: r.SourceRetryWait, - MaxConcurrentConnections: r.MaxConcurrentConnections, - DisableMetricsCollection: r.DisableMetricsCollection, - AdditionalProperties: r.AdditionalProperties, + Type: r.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type ResponsysSource. func (r *ResponsysSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: r.QueryTimeout, AdditionalColumns: r.AdditionalColumns, - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + DisableMetricsCollection: r.DisableMetricsCollection, + MaxConcurrentConnections: r.MaxConcurrentConnections, + QueryTimeout: r.QueryTimeout, SourceRetryCount: r.SourceRetryCount, SourceRetryWait: r.SourceRetryWait, - MaxConcurrentConnections: r.MaxConcurrentConnections, - DisableMetricsCollection: r.DisableMetricsCollection, - AdditionalProperties: r.AdditionalProperties, + Type: r.Type, } } @@ -22250,25 +22088,25 @@ type RestResourceDataset struct { // GetDataset implements the DatasetClassification interface for type RestResourceDataset. 
func (r *RestResourceDataset) GetDataset() *Dataset { return &Dataset{ - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + Annotations: r.Annotations, Description: r.Description, - Structure: r.Structure, - Schema: r.Schema, + Folder: r.Folder, LinkedServiceName: r.LinkedServiceName, Parameters: r.Parameters, - Annotations: r.Annotations, - Folder: r.Folder, - AdditionalProperties: r.AdditionalProperties, + Schema: r.Schema, + Structure: r.Structure, + Type: r.Type, } } // RestResourceDatasetTypeProperties - Properties specific to this dataset type. type RestResourceDatasetTypeProperties struct { - // The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). - AdditionalHeaders any + // The additional HTTP headers in the request to the RESTful API. + AdditionalHeaders map[string]any - // The pagination rules to compose next page requests. Type: string (or Expression with resultType string). - PaginationRules any + // The pagination rules to compose next page requests. + PaginationRules map[string]any // The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). RelativeURL any @@ -22307,12 +22145,12 @@ type RestServiceLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type RestServiceLinkedService. func (r *RestServiceLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + Annotations: r.Annotations, ConnectVia: r.ConnectVia, Description: r.Description, Parameters: r.Parameters, - Annotations: r.Annotations, - AdditionalProperties: r.AdditionalProperties, + Type: r.Type, } } @@ -22321,10 +22159,10 @@ type RestServiceLinkedServiceTypeProperties struct { // REQUIRED; Type of authentication used to connect to the REST service. AuthenticationType *RestServiceAuthenticationType - // REQUIRED; The base URL of the REST service. + // REQUIRED; The base URL of the REST service. Type: string (or Expression with resultType string). URL any - // The resource you are requesting authorization to use. + // The resource you are requesting authorization to use. Type: string (or Expression with resultType string). AADResourceID any // The additional HTTP headers in the request to RESTful API used for authorization. Type: object (or Expression with resultType @@ -22350,8 +22188,8 @@ type RestServiceLinkedServiceTypeProperties struct { EnableServerCertificateValidation any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password used in Basic authentication type. Password SecretBaseClassification @@ -22363,20 +22201,21 @@ type RestServiceLinkedServiceTypeProperties struct { // resultType string). Scope any - // The application's client ID used in AadServicePrincipal authentication type. + // The application's client ID used in AadServicePrincipal authentication type. Type: string (or Expression with resultType + // string). ServicePrincipalID any // The application's key used in AadServicePrincipal authentication type. ServicePrincipalKey SecretBaseClassification // The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application - // resides. + // resides. 
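AdditionalHeaders and PaginationRules on RestResourceDatasetTypeProperties change from `any` to explicit string-keyed maps. A minimal sketch with illustrative values; the import path is assumed:

package main

import (
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // path/version assumed
)

func main() {
	props := armdatafactory.RestResourceDatasetTypeProperties{
		RelativeURL: "items", // still `any`: a string or an Expression object
		AdditionalHeaders: map[string]any{
			"x-example-header": "value", // previously `any`, now a key/value map
		},
		PaginationRules: map[string]any{
			"AbsoluteUrl": "$.nextLink", // previously `any`, now a key/value map
		},
	}
	_ = props
}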
Type: string (or Expression with resultType string). Tenant any // The token endpoint of the authorization server to acquire access token. Type: string (or Expression with resultType string). TokenEndpoint any - // The user name used in Basic authentication type. + // The user name used in Basic authentication type. Type: string (or Expression with resultType string). UserName any } @@ -22428,14 +22267,14 @@ type RestSink struct { // GetCopySink implements the CopySinkClassification interface for type RestSink. func (r *RestSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: r.AdditionalProperties, + DisableMetricsCollection: r.DisableMetricsCollection, + MaxConcurrentConnections: r.MaxConcurrentConnections, + SinkRetryCount: r.SinkRetryCount, + SinkRetryWait: r.SinkRetryWait, Type: r.Type, WriteBatchSize: r.WriteBatchSize, WriteBatchTimeout: r.WriteBatchTimeout, - SinkRetryCount: r.SinkRetryCount, - SinkRetryWait: r.SinkRetryWait, - MaxConcurrentConnections: r.MaxConcurrentConnections, - DisableMetricsCollection: r.DisableMetricsCollection, - AdditionalProperties: r.AdditionalProperties, } } @@ -22487,12 +22326,12 @@ type RestSource struct { // GetCopySource implements the CopySourceClassification interface for type RestSource. func (r *RestSource) GetCopySource() *CopySource { return &CopySource{ - Type: r.Type, + AdditionalProperties: r.AdditionalProperties, + DisableMetricsCollection: r.DisableMetricsCollection, + MaxConcurrentConnections: r.MaxConcurrentConnections, SourceRetryCount: r.SourceRetryCount, SourceRetryWait: r.SourceRetryWait, - MaxConcurrentConnections: r.MaxConcurrentConnections, - DisableMetricsCollection: r.DisableMetricsCollection, - AdditionalProperties: r.AdditionalProperties, + Type: r.Type, } } @@ -22550,7 +22389,7 @@ type RunQueryOrderBy struct { // SQLAlwaysEncryptedProperties - Sql always encrypted properties. type SQLAlwaysEncryptedProperties struct { - // REQUIRED; Sql always encrypted AKV authentication type. Type: string (or Expression with resultType string). + // REQUIRED; Sql always encrypted AKV authentication type. Type: string. AlwaysEncryptedAkvAuthType *SQLAlwaysEncryptedAkvAuthType // The credential reference containing authentication information. @@ -22623,14 +22462,14 @@ type SQLDWSink struct { // GetCopySink implements the CopySinkClassification interface for type SQLDWSink. func (s *SQLDWSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + SinkRetryCount: s.SinkRetryCount, + SinkRetryWait: s.SinkRetryWait, Type: s.Type, WriteBatchSize: s.WriteBatchSize, WriteBatchTimeout: s.WriteBatchTimeout, - SinkRetryCount: s.SinkRetryCount, - SinkRetryWait: s.SinkRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, } } @@ -22649,6 +22488,11 @@ type SQLDWSource struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any + // Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. + // The default value is ReadCommitted. Type: string (or + // Expression with resultType string). + IsolationLevel any + // The maximum concurrent connection count for the source data store. 
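IsolationLevel is added to SQLDWSource here (and to the other SQL sources below) as an `any` holding one of the listed level names or an Expression. A small sketch, import path assumed:

package main

import (
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // path/version assumed
)

func main() {
	src := armdatafactory.SQLDWSource{
		QueryTimeout:   "02:00:00",        // existing `any` property
		IsolationLevel: "ReadUncommitted", // new `any` property: a level name or an Expression object
	}
	_ = src
}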
Type: integer (or Expression with resultType integer). MaxConcurrentConnections any @@ -22683,26 +22527,26 @@ type SQLDWSource struct { // GetCopySource implements the CopySourceClassification interface for type SQLDWSource. func (s *SQLDWSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SQLDWSource. func (s *SQLDWSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -22773,14 +22617,14 @@ type SQLMISink struct { // GetCopySink implements the CopySinkClassification interface for type SQLMISink. func (s *SQLMISink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + SinkRetryCount: s.SinkRetryCount, + SinkRetryWait: s.SinkRetryWait, Type: s.Type, WriteBatchSize: s.WriteBatchSize, WriteBatchTimeout: s.WriteBatchTimeout, - SinkRetryCount: s.SinkRetryCount, - SinkRetryWait: s.SinkRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, } } @@ -22799,6 +22643,11 @@ type SQLMISource struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any + // Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. + // The default value is ReadCommitted. Type: string (or + // Expression with resultType string). + IsolationLevel any + // The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). MaxConcurrentConnections any @@ -22835,26 +22684,26 @@ type SQLMISource struct { // GetCopySource implements the CopySourceClassification interface for type SQLMISource. 
func (s *SQLMISource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SQLMISource. func (s *SQLMISource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -22903,12 +22752,12 @@ type SQLServerLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SQLServerLinkedService. func (s *SQLServerLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -22921,8 +22770,8 @@ type SQLServerLinkedServiceTypeProperties struct { AlwaysEncryptedSettings *SQLAlwaysEncryptedProperties // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The on-premises Windows authentication password. Password SecretBaseClassification @@ -22989,14 +22838,14 @@ type SQLServerSink struct { // GetCopySink implements the CopySinkClassification interface for type SQLServerSink. func (s *SQLServerSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + SinkRetryCount: s.SinkRetryCount, + SinkRetryWait: s.SinkRetryWait, Type: s.Type, WriteBatchSize: s.WriteBatchSize, WriteBatchTimeout: s.WriteBatchTimeout, - SinkRetryCount: s.SinkRetryCount, - SinkRetryWait: s.SinkRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, } } @@ -23015,6 +22864,11 @@ type SQLServerSource struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any + // Specifies the transaction locking behavior for the SQL source. Allowed values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. + // The default value is ReadCommitted. Type: string (or + // Expression with resultType string). + IsolationLevel any + // The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). MaxConcurrentConnections any @@ -23051,26 +22905,26 @@ type SQLServerSource struct { // GetCopySource implements the CopySourceClassification interface for type SQLServerSource. func (s *SQLServerSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SQLServerSource. func (s *SQLServerSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -23097,9 +22951,16 @@ type SQLServerStoredProcedureActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -23107,26 +22968,30 @@ type SQLServerStoredProcedureActivity struct { // GetActivity implements the ActivityClassification interface for type SQLServerStoredProcedureActivity. func (s *SQLServerStoredProcedureActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type SQLServerStoredProcedureActivity. func (s *SQLServerStoredProcedureActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, LinkedServiceName: s.LinkedServiceName, - Policy: s.Policy, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + Policy: s.Policy, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } @@ -23176,15 +23041,15 @@ type SQLServerTableDataset struct { // GetDataset implements the DatasetClassification interface for type SQLServerTableDataset. 
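State and OnInactiveMarkAs are the new activity-level switches shown on SQLServerStoredProcedureActivity above. A hedged sketch of marking an activity inactive; the enum values are written as string conversions because the generated constant names are not shown in this change:

package main

import (
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // path/version assumed
)

func main() {
	activity := armdatafactory.SQLServerStoredProcedureActivity{
		Name: to.Ptr("usp_refresh_example"),
		// State defaults to Active when omitted; OnInactiveMarkAs only matters while the activity is inactive.
		State:            to.Ptr(armdatafactory.ActivityState("Inactive")),
		OnInactiveMarkAs: to.Ptr(armdatafactory.ActivityOnInactiveMarkAs("Succeeded")),
	}
	_ = activity
}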
func (s *SQLServerTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -23258,14 +23123,14 @@ type SQLSink struct { // GetCopySink implements the CopySinkClassification interface for type SQLSink. func (s *SQLSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + SinkRetryCount: s.SinkRetryCount, + SinkRetryWait: s.SinkRetryWait, Type: s.Type, WriteBatchSize: s.WriteBatchSize, WriteBatchTimeout: s.WriteBatchTimeout, - SinkRetryCount: s.SinkRetryCount, - SinkRetryWait: s.SinkRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, } } @@ -23322,26 +23187,26 @@ type SQLSource struct { // GetCopySource implements the CopySourceClassification interface for type SQLSource. func (s *SQLSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SQLSource. func (s *SQLSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -23359,13 +23224,13 @@ type SQLUpsertSettings struct { // SSISAccessCredential - SSIS access credential. type SSISAccessCredential struct { - // REQUIRED; Domain for windows authentication. + // REQUIRED; Domain for windows authentication. Type: string (or Expression with resultType string). Domain any // REQUIRED; Password for windows authentication. Password SecretBaseClassification - // REQUIRED; UseName for windows authentication. + // REQUIRED; UseName for windows authentication. Type: string (or Expression with resultType string). UserName any } @@ -23386,13 +23251,13 @@ type SSISChildPackage struct { // SSISExecutionCredential - SSIS package execution credential. type SSISExecutionCredential struct { - // REQUIRED; Domain for windows authentication. + // REQUIRED; Domain for windows authentication. Type: string (or Expression with resultType string). Domain any // REQUIRED; Password for windows authentication. 
Password *SecureString - // REQUIRED; UseName for windows authentication. + // REQUIRED; UseName for windows authentication. Type: string (or Expression with resultType string). UserName any } @@ -23499,12 +23364,12 @@ type SalesforceLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SalesforceLinkedService. func (s *SalesforceLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -23514,8 +23379,8 @@ type SalesforceLinkedServiceTypeProperties struct { APIVersion any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. // To copy data from custom domain, specify, for example, @@ -23559,12 +23424,12 @@ type SalesforceMarketingCloudLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SalesforceMarketingCloudLinkedService. func (s *SalesforceMarketingCloudLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -23582,8 +23447,8 @@ type SalesforceMarketingCloudLinkedServiceTypeProperties struct { ConnectionProperties any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression // with resultType boolean). @@ -23636,15 +23501,15 @@ type SalesforceMarketingCloudObjectDataset struct { // GetDataset implements the DatasetClassification interface for type SalesforceMarketingCloudObjectDataset. func (s *SalesforceMarketingCloudObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -23682,26 +23547,26 @@ type SalesforceMarketingCloudSource struct { // GetCopySource implements the CopySourceClassification interface for type SalesforceMarketingCloudSource. 
func (s *SalesforceMarketingCloudSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SalesforceMarketingCloudSource. func (s *SalesforceMarketingCloudSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -23742,15 +23607,15 @@ type SalesforceObjectDataset struct { // GetDataset implements the DatasetClassification interface for type SalesforceObjectDataset. func (s *SalesforceObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -23787,12 +23652,12 @@ type SalesforceServiceCloudLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SalesforceServiceCloudLinkedService. func (s *SalesforceServiceCloudLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -23802,8 +23667,8 @@ type SalesforceServiceCloudLinkedServiceTypeProperties struct { APIVersion any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify // 'https://test.salesforce.com'. To copy data from custom domain, specify, for @@ -23860,15 +23725,15 @@ type SalesforceServiceCloudObjectDataset struct { // GetDataset implements the DatasetClassification interface for type SalesforceServiceCloudObjectDataset. 
func (s *SalesforceServiceCloudObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -23923,14 +23788,14 @@ type SalesforceServiceCloudSink struct { // GetCopySink implements the CopySinkClassification interface for type SalesforceServiceCloudSink. func (s *SalesforceServiceCloudSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + SinkRetryCount: s.SinkRetryCount, + SinkRetryWait: s.SinkRetryWait, Type: s.Type, WriteBatchSize: s.WriteBatchSize, WriteBatchTimeout: s.WriteBatchTimeout, - SinkRetryCount: s.SinkRetryCount, - SinkRetryWait: s.SinkRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, } } @@ -23955,8 +23820,9 @@ type SalesforceServiceCloudSource struct { // Database query. Type: string (or Expression with resultType string). Query any - // The read behavior for the operation. Default is Query. - ReadBehavior *SalesforceSourceReadBehavior + // The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with + // resultType string). + ReadBehavior any // Source retry count. Type: integer (or Expression with resultType integer). SourceRetryCount any @@ -23968,12 +23834,12 @@ type SalesforceServiceCloudSource struct { // GetCopySource implements the CopySourceClassification interface for type SalesforceServiceCloudSource. func (s *SalesforceServiceCloudSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24022,14 +23888,14 @@ type SalesforceSink struct { // GetCopySink implements the CopySinkClassification interface for type SalesforceSink. func (s *SalesforceSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + SinkRetryCount: s.SinkRetryCount, + SinkRetryWait: s.SinkRetryWait, Type: s.Type, WriteBatchSize: s.WriteBatchSize, WriteBatchTimeout: s.WriteBatchTimeout, - SinkRetryCount: s.SinkRetryCount, - SinkRetryWait: s.SinkRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, } } @@ -24057,8 +23923,9 @@ type SalesforceSource struct { // Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). QueryTimeout any - // The read behavior for the operation. Default is Query. 
- ReadBehavior *SalesforceSourceReadBehavior + // The read behavior for the operation. Default is Query. Allowed values: Query/QueryAll. Type: string (or Expression with + // resultType string). + ReadBehavior any // Source retry count. Type: integer (or Expression with resultType integer). SourceRetryCount any @@ -24070,26 +23937,26 @@ type SalesforceSource struct { // GetCopySource implements the CopySourceClassification interface for type SalesforceSource. func (s *SalesforceSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SalesforceSource. func (s *SalesforceSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24120,12 +23987,12 @@ type SapBWLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SapBWLinkedService. func (s *SapBWLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24143,8 +24010,8 @@ type SapBWLinkedServiceTypeProperties struct { SystemNumber any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password to access the SAP BW server. Password SecretBaseClassification @@ -24187,15 +24054,15 @@ type SapBwCubeDataset struct { // GetDataset implements the DatasetClassification interface for type SapBwCubeDataset. func (s *SapBwCubeDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -24233,26 +24100,26 @@ type SapBwSource struct { // GetCopySource implements the CopySourceClassification interface for type SapBwSource. 
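The SalesforceServiceCloudSource and SalesforceSource hunks above replace the `*SalesforceSourceReadBehavior` enum with `any`, documented as "Query"/"QueryAll" or an expression. A hedged sketch of the new shape; the query text, parameter name, and import major version are illustrative assumptions.

package main

import (
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // major version is an assumption
)

// With ReadBehavior now `any`, pass the literal string that the old enum represented...
var salesforceSrc = &armdatafactory.SalesforceSource{
	Query:        "SELECT Id, Name FROM Account",
	ReadBehavior: "QueryAll", // include deleted records; "Query" is the default behavior
}

// ...or an ADF expression object when the behavior is parameterized.
var parameterizedSrc = &armdatafactory.SalesforceSource{
	ReadBehavior: map[string]any{
		"type":  "Expression",
		"value": "@pipeline().parameters.readBehavior",
	},
}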
func (s *SapBwSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SapBwSource. func (s *SapBwSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24283,12 +24150,12 @@ type SapCloudForCustomerLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SapCloudForCustomerLinkedService. func (s *SapCloudForCustomerLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24300,8 +24167,8 @@ type SapCloudForCustomerLinkedServiceTypeProperties struct { // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. // Either encryptedCredential or username/password must be provided. Type: - // string (or Expression with resultType string). - EncryptedCredential any + // string. + EncryptedCredential *string // The password for Basic authentication. Password SecretBaseClassification @@ -24347,15 +24214,15 @@ type SapCloudForCustomerResourceDataset struct { // GetDataset implements the DatasetClassification interface for type SapCloudForCustomerResourceDataset. func (s *SapCloudForCustomerResourceDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -24403,14 +24270,14 @@ type SapCloudForCustomerSink struct { // GetCopySink implements the CopySinkClassification interface for type SapCloudForCustomerSink. 
func (s *SapCloudForCustomerSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + SinkRetryCount: s.SinkRetryCount, + SinkRetryWait: s.SinkRetryWait, Type: s.Type, WriteBatchSize: s.WriteBatchSize, WriteBatchTimeout: s.WriteBatchTimeout, - SinkRetryCount: s.SinkRetryCount, - SinkRetryWait: s.SinkRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, } } @@ -24453,26 +24320,26 @@ type SapCloudForCustomerSource struct { // GetCopySource implements the CopySourceClassification interface for type SapCloudForCustomerSource. func (s *SapCloudForCustomerSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SapCloudForCustomerSource. func (s *SapCloudForCustomerSource) GetTabularSource() *TabularSource { return &TabularSource{ + AdditionalColumns: s.AdditionalColumns, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, QueryTimeout: s.QueryTimeout, - AdditionalColumns: s.AdditionalColumns, - Type: s.Type, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24503,12 +24370,12 @@ type SapEccLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SapEccLinkedService. func (s *SapEccLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24516,18 +24383,18 @@ func (s *SapEccLinkedService) GetLinkedService() *LinkedService { type SapEccLinkedServiceTypeProperties struct { // REQUIRED; The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string // (or Expression with resultType string). - URL *string + URL any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. // Either encryptedCredential or username/password must be provided. Type: - // string (or Expression with resultType string). + // string. EncryptedCredential *string // The password for Basic authentication. Password SecretBaseClassification // The username for Basic authentication. Type: string (or Expression with resultType string). - Username *string + Username any } // SapEccResourceDataset - The path of the SAP ECC OData entity. 
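The SapEccLinkedServiceTypeProperties hunk above loosens URL and Username from `*string` to `any` so they can carry ADF expressions, while EncryptedCredential tightens to `*string`. A small sketch under the same package assumptions as the earlier examples; all values are placeholders.

package main

import (
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // major version is an assumption
)

var sapEccProps = &armdatafactory.SapEccLinkedServiceTypeProperties{
	// URL and Username now accept either a literal string...
	URL:      "https://hostname:443/sap/opu/odata/sap/servicename/",
	Username: "basic-user",
	// ...or an expression object, e.g.
	// URL: map[string]any{"type": "Expression", "value": "@linkedService().sapUrl"},

	// EncryptedCredential is now a plain string pointer rather than `any`.
	EncryptedCredential: to.Ptr("<credential blob returned by the service>"), // placeholder
}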
@@ -24567,15 +24434,15 @@ type SapEccResourceDataset struct { // GetDataset implements the DatasetClassification interface for type SapEccResourceDataset. func (s *SapEccResourceDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -24624,26 +24491,26 @@ type SapEccSource struct { // GetCopySource implements the CopySourceClassification interface for type SapEccSource. func (s *SapEccSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SapEccSource. func (s *SapEccSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24674,12 +24541,12 @@ type SapHanaLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SapHanaLinkedService. func (s *SapHanaLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24692,8 +24559,8 @@ type SapHanaLinkedServiceProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password to access the SAP HANA server. Password SecretBaseClassification @@ -24756,26 +24623,26 @@ type SapHanaSource struct { // GetCopySource implements the CopySourceClassification interface for type SapHanaSource. 
func (s *SapHanaSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SapHanaSource. func (s *SapHanaSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24816,15 +24683,15 @@ type SapHanaTableDataset struct { // GetDataset implements the DatasetClassification interface for type SapHanaTableDataset. func (s *SapHanaTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -24864,12 +24731,12 @@ type SapOdpLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SapOdpLinkedService. func (s *SapOdpLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -24880,8 +24747,8 @@ type SapOdpLinkedServiceTypeProperties struct { ClientID any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType // string). @@ -24974,15 +24841,15 @@ type SapOdpResourceDataset struct { // GetDataset implements the DatasetClassification interface for type SapOdpResourceDataset. 
func (s *SapOdpResourceDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -25041,26 +24908,26 @@ type SapOdpSource struct { // GetCopySource implements the CopySourceClassification interface for type SapOdpSource. func (s *SapOdpSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SapOdpSource. func (s *SapOdpSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -25091,12 +24958,12 @@ type SapOpenHubLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SapOpenHubLinkedService. func (s *SapOpenHubLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -25108,8 +24975,8 @@ type SapOpenHubLinkedServiceTypeProperties struct { ClientID any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression // with resultType string). @@ -25191,26 +25058,26 @@ type SapOpenHubSource struct { // GetCopySource implements the CopySourceClassification interface for type SapOpenHubSource. 
func (s *SapOpenHubSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SapOpenHubSource. func (s *SapOpenHubSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -25251,15 +25118,15 @@ type SapOpenHubTableDataset struct { // GetDataset implements the DatasetClassification interface for type SapOpenHubTableDataset. func (s *SapOpenHubTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -25306,12 +25173,12 @@ type SapTableLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SapTableLinkedService. func (s *SapTableLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -25322,8 +25189,8 @@ type SapTableLinkedServiceTypeProperties struct { ClientID any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType // string). @@ -25428,15 +25295,15 @@ type SapTableResourceDataset struct { // GetDataset implements the DatasetClassification interface for type SapTableResourceDataset. 
func (s *SapTableResourceDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -25511,26 +25378,26 @@ type SapTableSource struct { // GetCopySource implements the CopySourceClassification interface for type SapTableSource. func (s *SapTableSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SapTableSource. func (s *SapTableSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -25561,23 +25428,23 @@ type ScheduleTrigger struct { // GetMultiplePipelineTrigger implements the MultiplePipelineTriggerClassification interface for type ScheduleTrigger. func (s *ScheduleTrigger) GetMultiplePipelineTrigger() *MultiplePipelineTrigger { return &MultiplePipelineTrigger{ - Pipelines: s.Pipelines, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, + Pipelines: s.Pipelines, RuntimeState: s.RuntimeState, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTrigger implements the TriggerClassification interface for type ScheduleTrigger. func (s *ScheduleTrigger) GetTrigger() *Trigger { return &Trigger{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, RuntimeState: s.RuntimeState, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -25649,9 +25516,16 @@ type ScriptActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. 
UserProperties []*UserProperty } @@ -25659,26 +25533,30 @@ type ScriptActivity struct { // GetActivity implements the ActivityClassification interface for type ScriptActivity. func (s *ScriptActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type ScriptActivity. func (s *ScriptActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, LinkedServiceName: s.LinkedServiceName, - Policy: s.Policy, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + Policy: s.Policy, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } @@ -25696,7 +25574,7 @@ type ScriptActivityParameter struct { // The type of the parameter. Type *ScriptActivityParameterType - // The value of the parameter. + // The value of the parameter. Type: string (or Expression with resultType string). Value any } @@ -25733,15 +25611,6 @@ type ScriptActivityTypePropertiesLogSettings struct { LogLocationSettings *LogLocationSettings } -// SecretBaseClassification provides polymorphic access to related types. -// Call the interface's GetSecretBase() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AzureKeyVaultSecretReference, *SecretBase, *SecureString -type SecretBaseClassification interface { - // GetSecretBase returns the SecretBase content of the underlying type. - GetSecretBase() *SecretBase -} - // SecretBase - The base definition of a secret type. type SecretBase struct { // REQUIRED; Type of the secret. @@ -25751,6 +25620,15 @@ type SecretBase struct { // GetSecretBase implements the SecretBaseClassification interface for type SecretBase. func (s *SecretBase) GetSecretBase() *SecretBase { return s } +// SecureInputOutputPolicy - Execution policy for an activity that supports secure input and output. +type SecureInputOutputPolicy struct { + // When set to true, Input from activity is considered as secure and will not be logged to monitoring. + SecureInput *bool + + // When set to true, Output from activity is considered as secure and will not be logged to monitoring. + SecureOutput *bool +} + // SecureString - Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get // or List API calls. type SecureString struct { @@ -25806,9 +25684,9 @@ type SelfHostedIntegrationRuntime struct { // GetIntegrationRuntime implements the IntegrationRuntimeClassification interface for type SelfHostedIntegrationRuntime. 
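The ScriptActivity hunk above (and the Activity/ExecutionActivity accessors that follow it) adds the optional State and OnInactiveMarkAs properties. A hedged sketch of deactivating an activity; the constant names ActivityStateInactive and ActivityOnInactiveMarkAsSucceeded are assumed from this SDK's generated constants, the linked-service name is illustrative, and the required script TypeProperties are omitted for brevity.

package main

import (
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // major version is an assumption
)

var scriptActivity = &armdatafactory.ScriptActivity{
	Name: to.Ptr("RunMaintenanceScript"),
	// New optional base-activity properties:
	State:            to.Ptr(armdatafactory.ActivityStateInactive),             // park the activity without removing it
	OnInactiveMarkAs: to.Ptr(armdatafactory.ActivityOnInactiveMarkAsSucceeded), // report the skipped run as Succeeded
	LinkedServiceName: &armdatafactory.LinkedServiceReference{
		ReferenceName: to.Ptr("AzureSqlDatabase1"),
		Type:          to.Ptr(armdatafactory.LinkedServiceReferenceTypeLinkedServiceReference),
	},
	// TypeProperties (the script blocks to run) omitted from this sketch.
}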
func (s *SelfHostedIntegrationRuntime) GetIntegrationRuntime() *IntegrationRuntime { return &IntegrationRuntime{ - Type: s.Type, - Description: s.Description, AdditionalProperties: s.AdditionalProperties, + Description: s.Description, + Type: s.Type, } } @@ -25893,10 +25771,10 @@ type SelfHostedIntegrationRuntimeStatus struct { // GetIntegrationRuntimeStatus implements the IntegrationRuntimeStatusClassification interface for type SelfHostedIntegrationRuntimeStatus. func (s *SelfHostedIntegrationRuntimeStatus) GetIntegrationRuntimeStatus() *IntegrationRuntimeStatus { return &IntegrationRuntimeStatus{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, DataFactoryName: s.DataFactoryName, State: s.State, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -25936,6 +25814,10 @@ type SelfHostedIntegrationRuntimeStatusTypeProperties struct { // READ-ONLY; The date at which the integration runtime will be scheduled to update, in ISO8601 format. ScheduledUpdateDate *time.Time + // READ-ONLY; An alternative option to ensure interactive authoring function when your self-hosted integration runtime is + // unable to establish a connection with Azure Relay. + SelfContainedInteractiveAuthoringEnabled *bool + // READ-ONLY; The URLs for the services used in integration runtime backend service. ServiceUrls []*string @@ -25956,6 +25838,10 @@ type SelfHostedIntegrationRuntimeStatusTypeProperties struct { type SelfHostedIntegrationRuntimeTypeProperties struct { // The base definition of a linked integration runtime. LinkedInfo LinkedIntegrationRuntimeTypeClassification + + // An alternative option to ensure interactive authoring function when your self-hosted integration runtime is unable to establish + // a connection with Azure Relay. + SelfContainedInteractiveAuthoringEnabled *bool } // ServiceNowLinkedService - ServiceNow server linked service. @@ -25985,12 +25871,12 @@ type ServiceNowLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type ServiceNowLinkedService. func (s *ServiceNowLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26009,8 +25895,8 @@ type ServiceNowLinkedServiceTypeProperties struct { ClientSecret SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password corresponding to the user name for Basic and OAuth2 authentication. Password SecretBaseClassification @@ -26066,15 +25952,15 @@ type ServiceNowObjectDataset struct { // GetDataset implements the DatasetClassification interface for type ServiceNowObjectDataset. 
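The self-hosted integration runtime hunks above add SelfContainedInteractiveAuthoringEnabled, writable on SelfHostedIntegrationRuntimeTypeProperties and surfaced read-only on the status properties. A minimal construction sketch, assuming the same armdatafactory package and to.Ptr helper as the earlier examples.

package main

import (
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // major version is an assumption
)

var selfHostedIR = &armdatafactory.SelfHostedIntegrationRuntime{
	Description: to.Ptr("Self-hosted IR with the interactive-authoring fallback enabled"),
	TypeProperties: &armdatafactory.SelfHostedIntegrationRuntimeTypeProperties{
		// New: keep interactive authoring working when the runtime cannot reach Azure Relay.
		SelfContainedInteractiveAuthoringEnabled: to.Ptr(true),
	},
}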
func (s *ServiceNowObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -26112,26 +25998,26 @@ type ServiceNowSource struct { // GetCopySource implements the CopySourceClassification interface for type ServiceNowSource. func (s *ServiceNowSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type ServiceNowSource. func (s *ServiceNowSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26156,10 +26042,10 @@ type ServicePrincipalCredential struct { // GetCredential implements the CredentialClassification interface for type ServicePrincipalCredential. func (s *ServicePrincipalCredential) GetCredential() *Credential { return &Credential{ - Type: s.Type, - Description: s.Description, - Annotations: s.Annotations, AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, + Description: s.Description, + Type: s.Type, } } @@ -26195,6 +26081,16 @@ type SetVariableActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity policy. + Policy *SecureInputOutputPolicy + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -26202,30 +26098,37 @@ type SetVariableActivity struct { // GetActivity implements the ActivityClassification interface for type SetVariableActivity. 
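The SetVariableActivity hunk above gains an activity-level Policy of the new SecureInputOutputPolicy type defined earlier in this diff, alongside State and OnInactiveMarkAs. A hedged sketch that redacts a secret-bearing variable assignment from monitoring; the activity name, variable name, and expression are illustrative, and the import major version is an assumption.

package main

import (
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // major version is an assumption
)

var stashToken = &armdatafactory.SetVariableActivity{
	Name: to.Ptr("StashToken"),
	Policy: &armdatafactory.SecureInputOutputPolicy{
		SecureInput:  to.Ptr(true), // do not log the activity input
		SecureOutput: to.Ptr(true), // do not log the resolved value
	},
	TypeProperties: &armdatafactory.SetVariableActivityTypeProperties{
		VariableName: to.Ptr("token"),
		Value: map[string]any{
			"type":  "Expression",
			"value": "@activity('GetToken').output.access_token",
		},
	},
}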
func (s *SetVariableActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type SetVariableActivity. func (s *SetVariableActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } // SetVariableActivityTypeProperties - SetVariable activity properties. type SetVariableActivityTypeProperties struct { - // Value to be set. Could be a static value or Expression + // If set to true, it sets the pipeline run return value. + SetSystemVariable *bool + + // Value to be set. Could be a static value or Expression. Value any // Name of the variable whose value needs to be set. @@ -26250,10 +26153,10 @@ type SftpLocation struct { // GetDatasetLocation implements the DatasetLocationClassification interface for type SftpLocation. func (s *SftpLocation) GetDatasetLocation() *DatasetLocation { return &DatasetLocation{ - Type: s.Type, - FolderPath: s.FolderPath, - FileName: s.FileName, AdditionalProperties: s.AdditionalProperties, + FileName: s.FileName, + FolderPath: s.FolderPath, + Type: s.Type, } } @@ -26275,8 +26178,8 @@ type SftpReadSettings struct { // If true, disable data store metrics collection. Default is false. Type: boolean (or Expression with resultType boolean). DisableMetricsCollection any - // Indicates whether to enable partition discovery. - EnablePartitionDiscovery *bool + // Indicates whether to enable partition discovery. Type: boolean (or Expression with resultType boolean). + EnablePartitionDiscovery any // Point to a text file that lists each file (relative path to the path configured in the dataset) that you want to copy. // Type: string (or Expression with resultType string). @@ -26308,10 +26211,10 @@ type SftpReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type SftpReadSettings. func (s *SftpReadSettings) GetStoreReadSettings() *StoreReadSettings { return &StoreReadSettings{ - Type: s.Type, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + Type: s.Type, } } @@ -26342,12 +26245,12 @@ type SftpServerLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SftpServerLinkedService. 
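Immediately above, SftpReadSettings.EnablePartitionDiscovery widens from *bool to `any` (a boolean literal or an expression), in line with the other read-settings types in this diff. A small sketch under the same package assumptions; the file list path is illustrative.

package main

import (
	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // major version is an assumption
)

var sftpRead = &armdatafactory.SftpReadSettings{
	// Previously *bool; now a literal boolean or an ADF expression object.
	EnablePartitionDiscovery: true,
	// EnablePartitionDiscovery: map[string]any{"type": "Expression", "value": "@pipeline().parameters.discoverPartitions"},
	FileListPath:             "config/files-to-copy.txt",
	DisableMetricsCollection: false,
}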
func (s *SftpServerLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26360,8 +26263,8 @@ type SftpServerLinkedServiceTypeProperties struct { AuthenticationType *SftpAuthenticationType // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. // Type: string (or Expression with resultType string). @@ -26423,11 +26326,11 @@ type SftpWriteSettings struct { // GetStoreWriteSettings implements the StoreWriteSettingsClassification interface for type SftpWriteSettings. func (s *SftpWriteSettings) GetStoreWriteSettings() *StoreWriteSettings { return &StoreWriteSettings{ - Type: s.Type, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - CopyBehavior: s.CopyBehavior, AdditionalProperties: s.AdditionalProperties, + CopyBehavior: s.CopyBehavior, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + Type: s.Type, } } @@ -26464,12 +26367,12 @@ type SharePointOnlineListLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SharePointOnlineListLinkedService. func (s *SharePointOnlineListLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26493,8 +26396,8 @@ type SharePointOnlineListLinkedServiceTypeProperties struct { TenantID any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string } // SharePointOnlineListResourceDataset - The sharepoint online list resource dataset. @@ -26534,15 +26437,15 @@ type SharePointOnlineListResourceDataset struct { // GetDataset implements the DatasetClassification interface for type SharePointOnlineListResourceDataset. func (s *SharePointOnlineListResourceDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -26579,12 +26482,12 @@ type SharePointOnlineListSource struct { // GetCopySource implements the CopySourceClassification interface for type SharePointOnlineListSource. 
func (s *SharePointOnlineListSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26615,12 +26518,12 @@ type ShopifyLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type ShopifyLinkedService. func (s *ShopifyLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26633,8 +26536,8 @@ type ShopifyLinkedServiceTypeProperties struct { AccessToken SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. UseEncryptedEndpoints any @@ -26684,15 +26587,15 @@ type ShopifyObjectDataset struct { // GetDataset implements the DatasetClassification interface for type ShopifyObjectDataset. func (s *ShopifyObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -26730,26 +26633,26 @@ type ShopifySource struct { // GetCopySource implements the CopySourceClassification interface for type ShopifySource. func (s *ShopifySource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type ShopifySource. 
func (s *ShopifySource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26790,12 +26693,12 @@ type SmartsheetLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SmartsheetLinkedService. func (s *SmartsheetLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26805,8 +26708,8 @@ type SmartsheetLinkedServiceTypeProperties struct { APIToken SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string } // SnowflakeDataset - The snowflake dataset. @@ -26846,15 +26749,15 @@ type SnowflakeDataset struct { // GetDataset implements the DatasetClassification interface for type SnowflakeDataset. func (s *SnowflakeDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -26889,8 +26792,8 @@ type SnowflakeExportCopyCommand struct { // GetExportSettings implements the ExportSettingsClassification interface for type SnowflakeExportCopyCommand. func (s *SnowflakeExportCopyCommand) GetExportSettings() *ExportSettings { return &ExportSettings{ - Type: s.Type, AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26916,8 +26819,8 @@ type SnowflakeImportCopyCommand struct { // GetImportSettings implements the ImportSettingsClassification interface for type SnowflakeImportCopyCommand. func (s *SnowflakeImportCopyCommand) GetImportSettings() *ImportSettings { return &ImportSettings{ - Type: s.Type, AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26948,12 +26851,12 @@ type SnowflakeLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SnowflakeLinkedService. 
func (s *SnowflakeLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -26963,8 +26866,8 @@ type SnowflakeLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Password *AzureKeyVaultSecretReference @@ -27006,14 +26909,14 @@ type SnowflakeSink struct { // GetCopySink implements the CopySinkClassification interface for type SnowflakeSink. func (s *SnowflakeSink) GetCopySink() *CopySink { return &CopySink{ + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + SinkRetryCount: s.SinkRetryCount, + SinkRetryWait: s.SinkRetryWait, Type: s.Type, WriteBatchSize: s.WriteBatchSize, WriteBatchTimeout: s.WriteBatchTimeout, - SinkRetryCount: s.SinkRetryCount, - SinkRetryWait: s.SinkRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, } } @@ -27047,12 +26950,12 @@ type SnowflakeSource struct { // GetCopySource implements the CopySourceClassification interface for type SnowflakeSource. func (s *SnowflakeSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -27104,12 +27007,12 @@ type SparkLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SparkLinkedService. func (s *SparkLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -27135,8 +27038,8 @@ type SparkLinkedServiceTypeProperties struct { EnableSSL any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The partial URL corresponding to the Spark server. HTTPPath any @@ -27200,15 +27103,15 @@ type SparkObjectDataset struct { // GetDataset implements the DatasetClassification interface for type SparkObjectDataset. 
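The Snowflake and Spark source/sink hunks above only reorder the embedded base fields inside the GetCopySource/GetCopySink accessors (they are now emitted alphabetically); behavior is unchanged. As a usage note, a hedged sketch of what those accessors are for: reading the shared settings off any CopySourceClassification without switching on the concrete type. The helper names are hypothetical.

package main

import (
	"fmt"

	armdatafactory "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // major version is an assumption
)

// describeSource works for any copy source (SnowflakeSource, SparkSource, ...):
// GetCopySource exposes the common fields, so no type switch is needed here.
func describeSource(src armdatafactory.CopySourceClassification) string {
	base := src.GetCopySource()
	return fmt.Sprintf("type=%v retryCount=%v retryWait=%v",
		derefOr(base.Type, "<unset>"), base.SourceRetryCount, base.SourceRetryWait)
}

func derefOr(p *string, fallback string) string {
	if p == nil {
		return fallback
	}
	return *p
}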
func (s *SparkObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -27246,26 +27149,26 @@ type SparkSource struct { // GetCopySource implements the CopySourceClassification interface for type SparkSource. func (s *SparkSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SparkSource. func (s *SparkSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -27296,12 +27199,12 @@ type SquareLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SquareLinkedService. func (s *SquareLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -27317,8 +27220,8 @@ type SquareLinkedServiceTypeProperties struct { ConnectionProperties any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The URL of the Square instance. (i.e. mystore.mysquare.com) Host any @@ -27374,15 +27277,15 @@ type SquareObjectDataset struct { // GetDataset implements the DatasetClassification interface for type SquareObjectDataset. 
func (s *SquareObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -27420,26 +27323,26 @@ type SquareSource struct { // GetCopySource implements the CopySourceClassification interface for type SquareSource. func (s *SquareSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SquareSource. func (s *SquareSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -27467,10 +27370,10 @@ type SsisEnvironment struct { // GetSsisObjectMetadata implements the SsisObjectMetadataClassification interface for type SsisEnvironment. func (s *SsisEnvironment) GetSsisObjectMetadata() *SsisObjectMetadata { return &SsisObjectMetadata{ - Type: s.Type, + Description: s.Description, ID: s.ID, Name: s.Name, - Description: s.Description, + Type: s.Type, } } @@ -27507,22 +27410,13 @@ type SsisFolder struct { // GetSsisObjectMetadata implements the SsisObjectMetadataClassification interface for type SsisFolder. func (s *SsisFolder) GetSsisObjectMetadata() *SsisObjectMetadata { return &SsisObjectMetadata{ - Type: s.Type, + Description: s.Description, ID: s.ID, Name: s.Name, - Description: s.Description, + Type: s.Type, } } -// SsisObjectMetadataClassification provides polymorphic access to related types. -// Call the interface's GetSsisObjectMetadata() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *SsisEnvironment, *SsisFolder, *SsisObjectMetadata, *SsisPackage, *SsisProject -type SsisObjectMetadataClassification interface { - // GetSsisObjectMetadata returns the SsisObjectMetadata content of the underlying type. - GetSsisObjectMetadata() *SsisObjectMetadata -} - // SsisObjectMetadata - SSIS object metadata. type SsisObjectMetadata struct { // REQUIRED; Type of metadata. @@ -27595,10 +27489,10 @@ type SsisPackage struct { // GetSsisObjectMetadata implements the SsisObjectMetadataClassification interface for type SsisPackage. 
func (s *SsisPackage) GetSsisObjectMetadata() *SsisObjectMetadata { return &SsisObjectMetadata{ - Type: s.Type, + Description: s.Description, ID: s.ID, Name: s.Name, - Description: s.Description, + Type: s.Type, } } @@ -27671,10 +27565,10 @@ type SsisProject struct { // GetSsisObjectMetadata implements the SsisObjectMetadataClassification interface for type SsisProject. func (s *SsisProject) GetSsisObjectMetadata() *SsisObjectMetadata { return &SsisObjectMetadata{ - Type: s.Type, + Description: s.Description, ID: s.ID, Name: s.Name, - Description: s.Description, + Type: s.Type, } } @@ -27718,17 +27612,6 @@ type StagingSettings struct { Path any } -// StoreReadSettingsClassification provides polymorphic access to related types. -// Call the interface's GetStoreReadSettings() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AmazonS3CompatibleReadSettings, *AmazonS3ReadSettings, *AzureBlobFSReadSettings, *AzureBlobStorageReadSettings, *AzureDataLakeStoreReadSettings, -// - *AzureFileStorageReadSettings, *FileServerReadSettings, *FtpReadSettings, *GoogleCloudStorageReadSettings, *HTTPReadSettings, -// - *HdfsReadSettings, *OracleCloudStorageReadSettings, *SftpReadSettings, *StoreReadSettings -type StoreReadSettingsClassification interface { - // GetStoreReadSettings returns the StoreReadSettings content of the underlying type. - GetStoreReadSettings() *StoreReadSettings -} - // StoreReadSettings - Connector read setting. type StoreReadSettings struct { // REQUIRED; The read setting type. @@ -27747,16 +27630,6 @@ type StoreReadSettings struct { // GetStoreReadSettings implements the StoreReadSettingsClassification interface for type StoreReadSettings. func (s *StoreReadSettings) GetStoreReadSettings() *StoreReadSettings { return s } -// StoreWriteSettingsClassification provides polymorphic access to related types. -// Call the interface's GetStoreWriteSettings() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *AzureBlobFSWriteSettings, *AzureBlobStorageWriteSettings, *AzureDataLakeStoreWriteSettings, *AzureFileStorageWriteSettings, -// - *FileServerWriteSettings, *SftpWriteSettings, *StoreWriteSettings -type StoreWriteSettingsClassification interface { - // GetStoreWriteSettings returns the StoreWriteSettings content of the underlying type. - GetStoreWriteSettings() *StoreWriteSettings -} - // StoreWriteSettings - Connector write settings. type StoreWriteSettings struct { // REQUIRED; The write setting type. @@ -27799,6 +27672,13 @@ type SwitchActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -27806,24 +27686,28 @@ type SwitchActivity struct { // GetActivity implements the ActivityClassification interface for type SwitchActivity. 
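The hunks above drop the SsisObjectMetadataClassification, StoreReadSettingsClassification, and StoreWriteSettingsClassification declarations from models.go; the interfaces are presumably relocated within the package rather than removed, so the consuming pattern stays the same. A minimal sketch of that pattern, assuming the interface remains exported under the same name; the helper below is hypothetical.

package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory" // major-version suffix omitted
)

// describe is a hypothetical helper showing the unchanged type-switch pattern
// over the polymorphic SsisObjectMetadataClassification interface.
func describe(md armdatafactory.SsisObjectMetadataClassification) {
	switch m := md.(type) {
	case *armdatafactory.SsisFolder:
		fmt.Println("folder:", *m.Name)
	case *armdatafactory.SsisProject:
		fmt.Println("project:", *m.Name)
	case *armdatafactory.SsisPackage:
		fmt.Println("package:", *m.Name)
	default:
		// Fall back to the shared fields exposed by the base type.
		base := md.GetSsisObjectMetadata()
		fmt.Println("metadata:", *base.Name)
	}
}

func main() {
	describe(&armdatafactory.SsisFolder{Name: to.Ptr("Extract")})
}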
func (s *SwitchActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type SwitchActivity. func (s *SwitchActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } @@ -27878,12 +27762,12 @@ type SybaseLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type SybaseLinkedService. func (s *SybaseLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, ConnectVia: s.ConnectVia, Description: s.Description, Parameters: s.Parameters, - Annotations: s.Annotations, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -27899,8 +27783,8 @@ type SybaseLinkedServiceTypeProperties struct { AuthenticationType *SybaseAuthenticationType // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password for authentication. Password SecretBaseClassification @@ -27946,26 +27830,26 @@ type SybaseSource struct { // GetCopySource implements the CopySourceClassification interface for type SybaseSource. func (s *SybaseSource) GetCopySource() *CopySource { return &CopySource{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type SybaseSource. func (s *SybaseSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: s.QueryTimeout, AdditionalColumns: s.AdditionalColumns, - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + DisableMetricsCollection: s.DisableMetricsCollection, + MaxConcurrentConnections: s.MaxConcurrentConnections, + QueryTimeout: s.QueryTimeout, SourceRetryCount: s.SourceRetryCount, SourceRetryWait: s.SourceRetryWait, - MaxConcurrentConnections: s.MaxConcurrentConnections, - DisableMetricsCollection: s.DisableMetricsCollection, - AdditionalProperties: s.AdditionalProperties, + Type: s.Type, } } @@ -28006,15 +27890,15 @@ type SybaseTableDataset struct { // GetDataset implements the DatasetClassification interface for type SybaseTableDataset. 
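SwitchActivity above (and the Synapse, Until, Validation, Wait, Web, and WebHook activities later in this diff) gains the optional State and OnInactiveMarkAs fields, which GetActivity and GetControlActivity now copy through. A minimal sketch of deactivating an activity; the generated constant names and the to.Ptr helper are assumptions based on the usual codegen naming, not part of this hunk.

package main

import (
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory" // major-version suffix omitted
)

func main() {
	// A WaitActivity switched off via the new optional fields; when State is
	// Inactive, OnInactiveMarkAs decides the status the run reports.
	wait := armdatafactory.WaitActivity{
		Name:             to.Ptr("waitBeforeCopy"),
		State:            to.Ptr(armdatafactory.ActivityStateInactive),             // assumed generated constant name
		OnInactiveMarkAs: to.Ptr(armdatafactory.ActivityOnInactiveMarkAsSucceeded), // assumed generated constant name
		TypeProperties: &armdatafactory.WaitActivityTypeProperties{
			WaitTimeInSeconds: 30, // any-typed: a literal or an Expression object
		},
	}
	fmt.Println(*wait.Name, string(*wait.State))
}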
func (s *SybaseTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: s.Type, + AdditionalProperties: s.AdditionalProperties, + Annotations: s.Annotations, Description: s.Description, - Structure: s.Structure, - Schema: s.Schema, + Folder: s.Folder, LinkedServiceName: s.LinkedServiceName, Parameters: s.Parameters, - Annotations: s.Annotations, - Folder: s.Folder, - AdditionalProperties: s.AdditionalProperties, + Schema: s.Schema, + Structure: s.Structure, + Type: s.Type, } } @@ -28047,9 +27931,16 @@ type SynapseNotebookActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -28057,26 +27948,30 @@ type SynapseNotebookActivity struct { // GetActivity implements the ActivityClassification interface for type SynapseNotebookActivity. func (s *SynapseNotebookActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type SynapseNotebookActivity. func (s *SynapseNotebookActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, LinkedServiceName: s.LinkedServiceName, - Policy: s.Policy, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + Policy: s.Policy, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } @@ -28088,6 +27983,9 @@ type SynapseNotebookActivityTypeProperties struct { // Spark configuration properties, which will override the 'conf' of the notebook you provide. Conf any + // The type of the spark config. + ConfigurationType *ConfigurationType + // Number of core and memory to be used for driver allocated in the specified Spark pool for the session, which will be used // for overriding 'driverCores' and 'driverMemory' of the notebook you provide. // Type: string (or Expression with resultType string). @@ -28098,14 +27996,21 @@ type SynapseNotebookActivityTypeProperties struct { // provide. Type: string (or Expression with resultType string). ExecutorSize any - // Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. - NumExecutors *int32 + // Number of executors to launch for this session, which will override the 'numExecutors' of the notebook you provide. Type: + // integer (or Expression with resultType integer). + NumExecutors any // Notebook parameters. Parameters map[string]*NotebookParameter + // Spark configuration property. + SparkConfig map[string]any + // The name of the big data pool which will be used to execute the notebook. 
SparkPool *BigDataPoolParametrizationReference + + // The spark configuration of the spark job. + TargetSparkConfiguration *SparkConfigurationParametrizationReference } // SynapseNotebookReference - Synapse notebook reference type. @@ -28205,9 +28110,16 @@ type SynapseSparkJobDefinitionActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -28215,26 +28127,30 @@ type SynapseSparkJobDefinitionActivity struct { // GetActivity implements the ActivityClassification interface for type SynapseSparkJobDefinitionActivity. func (s *SynapseSparkJobDefinitionActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type SynapseSparkJobDefinitionActivity. func (s *SynapseSparkJobDefinitionActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: s.AdditionalProperties, + DependsOn: s.DependsOn, + Description: s.Description, LinkedServiceName: s.LinkedServiceName, - Policy: s.Policy, Name: s.Name, + OnInactiveMarkAs: s.OnInactiveMarkAs, + Policy: s.Policy, + State: s.State, Type: s.Type, - Description: s.Description, - DependsOn: s.DependsOn, UserProperties: s.UserProperties, - AdditionalProperties: s.AdditionalProperties, } } @@ -28247,24 +28163,6 @@ type SynapseSparkJobReference struct { Type *SparkJobReferenceType } -// TabularSourceClassification provides polymorphic access to related types. -// Call the interface's GetTabularSource() method to access the common type. -// Use a type switch to determine the concrete type. 
The possible types are: -// - *AmazonMWSSource, *AmazonRdsForSQLServerSource, *AmazonRedshiftSource, *AzureMariaDBSource, *AzureMySQLSource, *AzurePostgreSQLSource, -// - *AzureSQLSource, *AzureTableSource, *CassandraSource, *ConcurSource, *CouchbaseSource, *Db2Source, *DrillSource, *DynamicsAXSource, -// - *EloquaSource, *GoogleAdWordsSource, *GoogleBigQuerySource, *GreenplumSource, *HBaseSource, *HiveSource, *HubspotSource, -// - *ImpalaSource, *InformixSource, *JiraSource, *MagentoSource, *MariaDBSource, *MarketoSource, *MySQLSource, *NetezzaSource, -// - *OdbcSource, *OracleServiceCloudSource, *PaypalSource, *PhoenixSource, *PostgreSQLSource, *PrestoSource, *QuickBooksSource, -// - *ResponsysSource, *SQLDWSource, *SQLMISource, *SQLServerSource, *SQLSource, *SalesforceMarketingCloudSource, *SalesforceSource, -// - *SapBwSource, *SapCloudForCustomerSource, *SapEccSource, *SapHanaSource, *SapOdpSource, *SapOpenHubSource, *SapTableSource, -// - *ServiceNowSource, *ShopifySource, *SparkSource, *SquareSource, *SybaseSource, *TabularSource, *TeradataSource, *VerticaSource, -// - *XeroSource, *ZohoSource -type TabularSourceClassification interface { - CopySourceClassification - // GetTabularSource returns the TabularSource content of the underlying type. - GetTabularSource() *TabularSource -} - // TabularSource - Copy activity sources of tabular type. type TabularSource struct { // REQUIRED; Copy source type. @@ -28296,12 +28194,12 @@ type TabularSource struct { // GetCopySource implements the CopySourceClassification interface for type TabularSource. func (t *TabularSource) GetCopySource() *CopySource { return &CopySource{ - Type: t.Type, + AdditionalProperties: t.AdditionalProperties, + DisableMetricsCollection: t.DisableMetricsCollection, + MaxConcurrentConnections: t.MaxConcurrentConnections, SourceRetryCount: t.SourceRetryCount, SourceRetryWait: t.SourceRetryWait, - MaxConcurrentConnections: t.MaxConcurrentConnections, - DisableMetricsCollection: t.DisableMetricsCollection, - AdditionalProperties: t.AdditionalProperties, + Type: t.Type, } } @@ -28323,8 +28221,8 @@ type TarGZipReadSettings struct { // GetCompressionReadSettings implements the CompressionReadSettingsClassification interface for type TarGZipReadSettings. func (t *TarGZipReadSettings) GetCompressionReadSettings() *CompressionReadSettings { return &CompressionReadSettings{ - Type: t.Type, AdditionalProperties: t.AdditionalProperties, + Type: t.Type, } } @@ -28343,8 +28241,8 @@ type TarReadSettings struct { // GetCompressionReadSettings implements the CompressionReadSettingsClassification interface for type TarReadSettings. func (t *TarReadSettings) GetCompressionReadSettings() *CompressionReadSettings { return &CompressionReadSettings{ - Type: t.Type, AdditionalProperties: t.AdditionalProperties, + Type: t.Type, } } @@ -28375,12 +28273,12 @@ type TeamDeskLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type TeamDeskLinkedService. func (t *TeamDeskLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: t.Type, + AdditionalProperties: t.AdditionalProperties, + Annotations: t.Annotations, ConnectVia: t.ConnectVia, Description: t.Description, Parameters: t.Parameters, - Annotations: t.Annotations, - AdditionalProperties: t.AdditionalProperties, + Type: t.Type, } } @@ -28396,8 +28294,8 @@ type TeamDeskLinkedServiceTypeProperties struct { APIToken SecretBaseClassification // The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password of the TeamDesk source. Password SecretBaseClassification @@ -28433,12 +28331,12 @@ type TeradataLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type TeradataLinkedService. func (t *TeradataLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: t.Type, + AdditionalProperties: t.AdditionalProperties, + Annotations: t.Annotations, ConnectVia: t.ConnectVia, Description: t.Description, Parameters: t.Parameters, - Annotations: t.Annotations, - AdditionalProperties: t.AdditionalProperties, + Type: t.Type, } } @@ -28451,8 +28349,8 @@ type TeradataLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // Password for authentication. Password SecretBaseClassification @@ -28519,26 +28417,26 @@ type TeradataSource struct { // GetCopySource implements the CopySourceClassification interface for type TeradataSource. func (t *TeradataSource) GetCopySource() *CopySource { return &CopySource{ - Type: t.Type, + AdditionalProperties: t.AdditionalProperties, + DisableMetricsCollection: t.DisableMetricsCollection, + MaxConcurrentConnections: t.MaxConcurrentConnections, SourceRetryCount: t.SourceRetryCount, SourceRetryWait: t.SourceRetryWait, - MaxConcurrentConnections: t.MaxConcurrentConnections, - DisableMetricsCollection: t.DisableMetricsCollection, - AdditionalProperties: t.AdditionalProperties, + Type: t.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type TeradataSource. func (t *TeradataSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: t.QueryTimeout, AdditionalColumns: t.AdditionalColumns, - Type: t.Type, + AdditionalProperties: t.AdditionalProperties, + DisableMetricsCollection: t.DisableMetricsCollection, + MaxConcurrentConnections: t.MaxConcurrentConnections, + QueryTimeout: t.QueryTimeout, SourceRetryCount: t.SourceRetryCount, SourceRetryWait: t.SourceRetryWait, - MaxConcurrentConnections: t.MaxConcurrentConnections, - DisableMetricsCollection: t.DisableMetricsCollection, - AdditionalProperties: t.AdditionalProperties, + Type: t.Type, } } @@ -28579,15 +28477,15 @@ type TeradataTableDataset struct { // GetDataset implements the DatasetClassification interface for type TeradataTableDataset. func (t *TeradataTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: t.Type, + AdditionalProperties: t.AdditionalProperties, + Annotations: t.Annotations, Description: t.Description, - Structure: t.Structure, - Schema: t.Schema, + Folder: t.Folder, LinkedServiceName: t.LinkedServiceName, Parameters: t.Parameters, - Annotations: t.Annotations, - Folder: t.Folder, - AdditionalProperties: t.AdditionalProperties, + Schema: t.Schema, + Structure: t.Structure, + Type: t.Type, } } @@ -28652,10 +28550,10 @@ type TextFormat struct { // GetDatasetStorageFormat implements the DatasetStorageFormatClassification interface for type TextFormat. 
func (t *TextFormat) GetDatasetStorageFormat() *DatasetStorageFormat { return &DatasetStorageFormat{ - Type: t.Type, - Serializer: t.Serializer, - Deserializer: t.Deserializer, AdditionalProperties: t.AdditionalProperties, + Deserializer: t.Deserializer, + Serializer: t.Serializer, + Type: t.Type, } } @@ -28677,16 +28575,6 @@ type Transformation struct { LinkedService *LinkedServiceReference } -// TriggerClassification provides polymorphic access to related types. -// Call the interface's GetTrigger() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *BlobEventsTrigger, *BlobTrigger, *ChainingTrigger, *CustomEventsTrigger, *MultiplePipelineTrigger, *RerunTumblingWindowTrigger, -// - *ScheduleTrigger, *Trigger, *TumblingWindowTrigger -type TriggerClassification interface { - // GetTrigger returns the Trigger content of the underlying type. - GetTrigger() *Trigger -} - // Trigger - Azure data factory nested object which contains information about creating pipeline run type Trigger struct { // REQUIRED; Trigger type. @@ -28708,16 +28596,6 @@ type Trigger struct { // GetTrigger implements the TriggerClassification interface for type Trigger. func (t *Trigger) GetTrigger() *Trigger { return t } -// TriggerDependencyReferenceClassification provides polymorphic access to related types. -// Call the interface's GetTriggerDependencyReference() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *TriggerDependencyReference, *TumblingWindowTriggerDependencyReference -type TriggerDependencyReferenceClassification interface { - DependencyReferenceClassification - // GetTriggerDependencyReference returns the TriggerDependencyReference content of the underlying type. - GetTriggerDependencyReference() *TriggerDependencyReference -} - // TriggerDependencyReference - Trigger referenced dependency. type TriggerDependencyReference struct { // REQUIRED; Referenced trigger. @@ -28838,21 +28716,6 @@ type TriggerRun struct { TriggeredPipelines map[string]*string } -// TriggerRunsClientCancelOptions contains the optional parameters for the TriggerRunsClient.Cancel method. -type TriggerRunsClientCancelOptions struct { - // placeholder for future optional parameters -} - -// TriggerRunsClientQueryByFactoryOptions contains the optional parameters for the TriggerRunsClient.QueryByFactory method. -type TriggerRunsClientQueryByFactoryOptions struct { - // placeholder for future optional parameters -} - -// TriggerRunsClientRerunOptions contains the optional parameters for the TriggerRunsClient.Rerun method. -type TriggerRunsClientRerunOptions struct { - // placeholder for future optional parameters -} - // TriggerRunsQueryResponse - A list of trigger runs. type TriggerRunsQueryResponse struct { // REQUIRED; List of trigger runs. @@ -28871,67 +28734,6 @@ type TriggerSubscriptionOperationStatus struct { TriggerName *string } -// TriggersClientBeginStartOptions contains the optional parameters for the TriggersClient.BeginStart method. -type TriggersClientBeginStartOptions struct { - // Resumes the LRO from the provided token. - ResumeToken string -} - -// TriggersClientBeginStopOptions contains the optional parameters for the TriggersClient.BeginStop method. -type TriggersClientBeginStopOptions struct { - // Resumes the LRO from the provided token. 
- ResumeToken string -} - -// TriggersClientBeginSubscribeToEventsOptions contains the optional parameters for the TriggersClient.BeginSubscribeToEvents -// method. -type TriggersClientBeginSubscribeToEventsOptions struct { - // Resumes the LRO from the provided token. - ResumeToken string -} - -// TriggersClientBeginUnsubscribeFromEventsOptions contains the optional parameters for the TriggersClient.BeginUnsubscribeFromEvents -// method. -type TriggersClientBeginUnsubscribeFromEventsOptions struct { - // Resumes the LRO from the provided token. - ResumeToken string -} - -// TriggersClientCreateOrUpdateOptions contains the optional parameters for the TriggersClient.CreateOrUpdate method. -type TriggersClientCreateOrUpdateOptions struct { - // ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * - // for unconditional update. - IfMatch *string -} - -// TriggersClientDeleteOptions contains the optional parameters for the TriggersClient.Delete method. -type TriggersClientDeleteOptions struct { - // placeholder for future optional parameters -} - -// TriggersClientGetEventSubscriptionStatusOptions contains the optional parameters for the TriggersClient.GetEventSubscriptionStatus -// method. -type TriggersClientGetEventSubscriptionStatusOptions struct { - // placeholder for future optional parameters -} - -// TriggersClientGetOptions contains the optional parameters for the TriggersClient.Get method. -type TriggersClientGetOptions struct { - // ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was - // provided, then no content will be returned. - IfNoneMatch *string -} - -// TriggersClientListByFactoryOptions contains the optional parameters for the TriggersClient.NewListByFactoryPager method. -type TriggersClientListByFactoryOptions struct { - // placeholder for future optional parameters -} - -// TriggersClientQueryByFactoryOptions contains the optional parameters for the TriggersClient.QueryByFactory method. -type TriggersClientQueryByFactoryOptions struct { - // placeholder for future optional parameters -} - // TumblingWindowTrigger - Trigger that schedules pipeline runs for all fixed time interval windows from a start time without // gaps and also supports backfill scenarios (when start time is in the past). type TumblingWindowTrigger struct { @@ -28960,11 +28762,11 @@ type TumblingWindowTrigger struct { // GetTrigger implements the TriggerClassification interface for type TumblingWindowTrigger. func (t *TumblingWindowTrigger) GetTrigger() *Trigger { return &Trigger{ - Type: t.Type, + AdditionalProperties: t.AdditionalProperties, + Annotations: t.Annotations, Description: t.Description, RuntimeState: t.RuntimeState, - Annotations: t.Annotations, - AdditionalProperties: t.AdditionalProperties, + Type: t.Type, } } @@ -29056,12 +28858,12 @@ type TwilioLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type TwilioLinkedService. func (t *TwilioLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: t.Type, + AdditionalProperties: t.AdditionalProperties, + Annotations: t.Annotations, ConnectVia: t.ConnectVia, Description: t.Description, Parameters: t.Parameters, - Annotations: t.Annotations, - AdditionalProperties: t.AdditionalProperties, + Type: t.Type, } } @@ -29070,7 +28872,7 @@ type TwilioLinkedServiceTypeProperties struct { // REQUIRED; The auth token of Twilio service. 
Password SecretBaseClassification - // REQUIRED; The Account SID of Twilio service. + // REQUIRED; The Account SID of Twilio service. Type: string (or Expression with resultType string). UserName any } @@ -29095,6 +28897,13 @@ type UntilActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -29102,24 +28911,28 @@ type UntilActivity struct { // GetActivity implements the ActivityClassification interface for type UntilActivity. func (u *UntilActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: u.AdditionalProperties, + DependsOn: u.DependsOn, + Description: u.Description, Name: u.Name, + OnInactiveMarkAs: u.OnInactiveMarkAs, + State: u.State, Type: u.Type, - Description: u.Description, - DependsOn: u.DependsOn, UserProperties: u.UserProperties, - AdditionalProperties: u.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type UntilActivity. func (u *UntilActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: u.AdditionalProperties, + DependsOn: u.DependsOn, + Description: u.Description, Name: u.Name, + OnInactiveMarkAs: u.OnInactiveMarkAs, + State: u.State, Type: u.Type, - Description: u.Description, - DependsOn: u.DependsOn, UserProperties: u.UserProperties, - AdditionalProperties: u.AdditionalProperties, } } @@ -29133,8 +28946,7 @@ type UntilActivityTypeProperties struct { // Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) // which is 1 week as default. Type: string (or Expression with resultType - // string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Type: string (or Expression with resultType string), - // pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + // string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). Timeout any } @@ -29204,6 +29016,13 @@ type ValidationActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -29211,24 +29030,28 @@ type ValidationActivity struct { // GetActivity implements the ActivityClassification interface for type ValidationActivity. 
func (v *ValidationActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: v.AdditionalProperties, + DependsOn: v.DependsOn, + Description: v.Description, Name: v.Name, + OnInactiveMarkAs: v.OnInactiveMarkAs, + State: v.State, Type: v.Type, - Description: v.Description, - DependsOn: v.DependsOn, UserProperties: v.UserProperties, - AdditionalProperties: v.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type ValidationActivity. func (v *ValidationActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: v.AdditionalProperties, + DependsOn: v.DependsOn, + Description: v.Description, Name: v.Name, + OnInactiveMarkAs: v.OnInactiveMarkAs, + State: v.State, Type: v.Type, - Description: v.Description, - DependsOn: v.DependsOn, UserProperties: v.UserProperties, - AdditionalProperties: v.AdditionalProperties, } } @@ -29303,12 +29126,12 @@ type VerticaLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type VerticaLinkedService. func (v *VerticaLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: v.Type, + AdditionalProperties: v.AdditionalProperties, + Annotations: v.Annotations, ConnectVia: v.ConnectVia, Description: v.Description, Parameters: v.Parameters, - Annotations: v.Annotations, - AdditionalProperties: v.AdditionalProperties, + Type: v.Type, } } @@ -29318,8 +29141,8 @@ type VerticaLinkedServiceTypeProperties struct { ConnectionString any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The Azure key vault secret reference of password in connection string. Pwd *AzureKeyVaultSecretReference @@ -29359,26 +29182,26 @@ type VerticaSource struct { // GetCopySource implements the CopySourceClassification interface for type VerticaSource. func (v *VerticaSource) GetCopySource() *CopySource { return &CopySource{ - Type: v.Type, + AdditionalProperties: v.AdditionalProperties, + DisableMetricsCollection: v.DisableMetricsCollection, + MaxConcurrentConnections: v.MaxConcurrentConnections, SourceRetryCount: v.SourceRetryCount, SourceRetryWait: v.SourceRetryWait, - MaxConcurrentConnections: v.MaxConcurrentConnections, - DisableMetricsCollection: v.DisableMetricsCollection, - AdditionalProperties: v.AdditionalProperties, + Type: v.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type VerticaSource. func (v *VerticaSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: v.QueryTimeout, AdditionalColumns: v.AdditionalColumns, - Type: v.Type, + AdditionalProperties: v.AdditionalProperties, + DisableMetricsCollection: v.DisableMetricsCollection, + MaxConcurrentConnections: v.MaxConcurrentConnections, + QueryTimeout: v.QueryTimeout, SourceRetryCount: v.SourceRetryCount, SourceRetryWait: v.SourceRetryWait, - MaxConcurrentConnections: v.MaxConcurrentConnections, - DisableMetricsCollection: v.DisableMetricsCollection, - AdditionalProperties: v.AdditionalProperties, + Type: v.Type, } } @@ -29419,15 +29242,15 @@ type VerticaTableDataset struct { // GetDataset implements the DatasetClassification interface for type VerticaTableDataset. 
func (v *VerticaTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: v.Type, + AdditionalProperties: v.AdditionalProperties, + Annotations: v.Annotations, Description: v.Description, - Structure: v.Structure, - Schema: v.Schema, + Folder: v.Folder, LinkedServiceName: v.LinkedServiceName, Parameters: v.Parameters, - Annotations: v.Annotations, - Folder: v.Folder, - AdditionalProperties: v.AdditionalProperties, + Schema: v.Schema, + Structure: v.Structure, + Type: v.Type, } } @@ -29451,6 +29274,13 @@ type WaitActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -29458,30 +29288,34 @@ type WaitActivity struct { // GetActivity implements the ActivityClassification interface for type WaitActivity. func (w *WaitActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: w.AdditionalProperties, + DependsOn: w.DependsOn, + Description: w.Description, Name: w.Name, + OnInactiveMarkAs: w.OnInactiveMarkAs, + State: w.State, Type: w.Type, - Description: w.Description, - DependsOn: w.DependsOn, UserProperties: w.UserProperties, - AdditionalProperties: w.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type WaitActivity. func (w *WaitActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: w.AdditionalProperties, + DependsOn: w.DependsOn, + Description: w.Description, Name: w.Name, + OnInactiveMarkAs: w.OnInactiveMarkAs, + State: w.State, Type: w.Type, - Description: w.Description, - DependsOn: w.DependsOn, UserProperties: w.UserProperties, - AdditionalProperties: w.AdditionalProperties, } } // WaitActivityTypeProperties - Wait activity properties. type WaitActivityTypeProperties struct { - // REQUIRED; Duration in seconds. + // REQUIRED; Duration in seconds. Type: integer (or Expression with resultType integer). WaitTimeInSeconds any } @@ -29508,9 +29342,16 @@ type WebActivity struct { // Linked service reference. LinkedServiceName *LinkedServiceReference + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + // Activity policy. Policy *ActivityPolicy + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -29518,26 +29359,30 @@ type WebActivity struct { // GetActivity implements the ActivityClassification interface for type WebActivity. 
func (w *WebActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: w.AdditionalProperties, + DependsOn: w.DependsOn, + Description: w.Description, Name: w.Name, + OnInactiveMarkAs: w.OnInactiveMarkAs, + State: w.State, Type: w.Type, - Description: w.Description, - DependsOn: w.DependsOn, UserProperties: w.UserProperties, - AdditionalProperties: w.AdditionalProperties, } } // GetExecutionActivity implements the ExecutionActivityClassification interface for type WebActivity. func (w *WebActivity) GetExecutionActivity() *ExecutionActivity { return &ExecutionActivity{ + AdditionalProperties: w.AdditionalProperties, + DependsOn: w.DependsOn, + Description: w.Description, LinkedServiceName: w.LinkedServiceName, - Policy: w.Policy, Name: w.Name, + OnInactiveMarkAs: w.OnInactiveMarkAs, + Policy: w.Policy, + State: w.State, Type: w.Type, - Description: w.Description, - DependsOn: w.DependsOn, UserProperties: w.UserProperties, - AdditionalProperties: w.AdditionalProperties, } } @@ -29614,8 +29459,8 @@ type WebAnonymousAuthentication struct { // GetWebLinkedServiceTypeProperties implements the WebLinkedServiceTypePropertiesClassification interface for type WebAnonymousAuthentication. func (w *WebAnonymousAuthentication) GetWebLinkedServiceTypeProperties() *WebLinkedServiceTypeProperties { return &WebLinkedServiceTypeProperties{ - URL: w.URL, AuthenticationType: w.AuthenticationType, + URL: w.URL, } } @@ -29638,8 +29483,8 @@ type WebBasicAuthentication struct { // GetWebLinkedServiceTypeProperties implements the WebLinkedServiceTypePropertiesClassification interface for type WebBasicAuthentication. func (w *WebBasicAuthentication) GetWebLinkedServiceTypeProperties() *WebLinkedServiceTypeProperties { return &WebLinkedServiceTypeProperties{ - URL: w.URL, AuthenticationType: w.AuthenticationType, + URL: w.URL, } } @@ -29664,8 +29509,8 @@ type WebClientCertificateAuthentication struct { // GetWebLinkedServiceTypeProperties implements the WebLinkedServiceTypePropertiesClassification interface for type WebClientCertificateAuthentication. func (w *WebClientCertificateAuthentication) GetWebLinkedServiceTypeProperties() *WebLinkedServiceTypeProperties { return &WebLinkedServiceTypeProperties{ - URL: w.URL, AuthenticationType: w.AuthenticationType, + URL: w.URL, } } @@ -29689,6 +29534,16 @@ type WebHookActivity struct { // Activity description. Description *string + // Status result of the activity when the state is set to Inactive. This is an optional property and if not provided when + // the activity is inactive, the status will be Succeeded by default. + OnInactiveMarkAs *ActivityOnInactiveMarkAs + + // Activity policy. + Policy *SecureInputOutputPolicy + + // Activity state. This is an optional property and if not provided, the state will be Active by default. + State *ActivityState + // Activity user properties. UserProperties []*UserProperty } @@ -29696,24 +29551,28 @@ type WebHookActivity struct { // GetActivity implements the ActivityClassification interface for type WebHookActivity. func (w *WebHookActivity) GetActivity() *Activity { return &Activity{ + AdditionalProperties: w.AdditionalProperties, + DependsOn: w.DependsOn, + Description: w.Description, Name: w.Name, + OnInactiveMarkAs: w.OnInactiveMarkAs, + State: w.State, Type: w.Type, - Description: w.Description, - DependsOn: w.DependsOn, UserProperties: w.UserProperties, - AdditionalProperties: w.AdditionalProperties, } } // GetControlActivity implements the ControlActivityClassification interface for type WebHookActivity. 
func (w *WebHookActivity) GetControlActivity() *ControlActivity { return &ControlActivity{ + AdditionalProperties: w.AdditionalProperties, + DependsOn: w.DependsOn, + Description: w.Description, Name: w.Name, + OnInactiveMarkAs: w.OnInactiveMarkAs, + State: w.State, Type: w.Type, - Description: w.Description, - DependsOn: w.DependsOn, UserProperties: w.UserProperties, - AdditionalProperties: w.AdditionalProperties, } } @@ -29774,24 +29633,15 @@ type WebLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type WebLinkedService. func (w *WebLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: w.Type, + AdditionalProperties: w.AdditionalProperties, + Annotations: w.Annotations, ConnectVia: w.ConnectVia, Description: w.Description, Parameters: w.Parameters, - Annotations: w.Annotations, - AdditionalProperties: w.AdditionalProperties, + Type: w.Type, } } -// WebLinkedServiceTypePropertiesClassification provides polymorphic access to related types. -// Call the interface's GetWebLinkedServiceTypeProperties() method to access the common type. -// Use a type switch to determine the concrete type. The possible types are: -// - *WebAnonymousAuthentication, *WebBasicAuthentication, *WebClientCertificateAuthentication, *WebLinkedServiceTypeProperties -type WebLinkedServiceTypePropertiesClassification interface { - // GetWebLinkedServiceTypeProperties returns the WebLinkedServiceTypeProperties content of the underlying type. - GetWebLinkedServiceTypeProperties() *WebLinkedServiceTypeProperties -} - // WebLinkedServiceTypeProperties - Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic // based on authenticationType, so not flattened in SDK models. type WebLinkedServiceTypeProperties struct { @@ -29836,12 +29686,12 @@ type WebSource struct { // GetCopySource implements the CopySourceClassification interface for type WebSource. func (w *WebSource) GetCopySource() *CopySource { return &CopySource{ - Type: w.Type, + AdditionalProperties: w.AdditionalProperties, + DisableMetricsCollection: w.DisableMetricsCollection, + MaxConcurrentConnections: w.MaxConcurrentConnections, SourceRetryCount: w.SourceRetryCount, SourceRetryWait: w.SourceRetryWait, - MaxConcurrentConnections: w.MaxConcurrentConnections, - DisableMetricsCollection: w.DisableMetricsCollection, - AdditionalProperties: w.AdditionalProperties, + Type: w.Type, } } @@ -29882,15 +29732,15 @@ type WebTableDataset struct { // GetDataset implements the DatasetClassification interface for type WebTableDataset. func (w *WebTableDataset) GetDataset() *Dataset { return &Dataset{ - Type: w.Type, + AdditionalProperties: w.AdditionalProperties, + Annotations: w.Annotations, Description: w.Description, - Structure: w.Structure, - Schema: w.Schema, + Folder: w.Folder, LinkedServiceName: w.LinkedServiceName, Parameters: w.Parameters, - Annotations: w.Annotations, - Folder: w.Folder, - AdditionalProperties: w.AdditionalProperties, + Schema: w.Schema, + Structure: w.Structure, + Type: w.Type, } } @@ -29925,10 +29775,10 @@ type WranglingDataFlow struct { // GetDataFlow implements the DataFlowClassification interface for type WranglingDataFlow. 
func (w *WranglingDataFlow) GetDataFlow() *DataFlow { return &DataFlow{ - Type: w.Type, - Description: w.Description, Annotations: w.Annotations, + Description: w.Description, Folder: w.Folder, + Type: w.Type, } } @@ -29969,15 +29819,15 @@ type XMLDataset struct { // GetDataset implements the DatasetClassification interface for type XMLDataset. func (x *XMLDataset) GetDataset() *Dataset { return &Dataset{ - Type: x.Type, + AdditionalProperties: x.AdditionalProperties, + Annotations: x.Annotations, Description: x.Description, - Structure: x.Structure, - Schema: x.Schema, + Folder: x.Folder, LinkedServiceName: x.LinkedServiceName, Parameters: x.Parameters, - Annotations: x.Annotations, - Folder: x.Folder, - AdditionalProperties: x.AdditionalProperties, + Schema: x.Schema, + Structure: x.Structure, + Type: x.Type, } } @@ -30030,8 +29880,8 @@ type XMLReadSettings struct { // GetFormatReadSettings implements the FormatReadSettingsClassification interface for type XMLReadSettings. func (x *XMLReadSettings) GetFormatReadSettings() *FormatReadSettings { return &FormatReadSettings{ - Type: x.Type, AdditionalProperties: x.AdditionalProperties, + Type: x.Type, } } @@ -30069,12 +29919,12 @@ type XMLSource struct { // GetCopySource implements the CopySourceClassification interface for type XMLSource. func (x *XMLSource) GetCopySource() *CopySource { return &CopySource{ - Type: x.Type, + AdditionalProperties: x.AdditionalProperties, + DisableMetricsCollection: x.DisableMetricsCollection, + MaxConcurrentConnections: x.MaxConcurrentConnections, SourceRetryCount: x.SourceRetryCount, SourceRetryWait: x.SourceRetryWait, - MaxConcurrentConnections: x.MaxConcurrentConnections, - DisableMetricsCollection: x.DisableMetricsCollection, - AdditionalProperties: x.AdditionalProperties, + Type: x.Type, } } @@ -30105,12 +29955,12 @@ type XeroLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type XeroLinkedService. func (x *XeroLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: x.Type, + AdditionalProperties: x.AdditionalProperties, + Annotations: x.Annotations, ConnectVia: x.ConnectVia, Description: x.Description, Parameters: x.Parameters, - Annotations: x.Annotations, - AdditionalProperties: x.AdditionalProperties, + Type: x.Type, } } @@ -30123,8 +29973,8 @@ type XeroLinkedServiceTypeProperties struct { ConsumerKey SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The endpoint of the Xero server. (i.e. api.xero.com) Host any @@ -30181,15 +30031,15 @@ type XeroObjectDataset struct { // GetDataset implements the DatasetClassification interface for type XeroObjectDataset. func (x *XeroObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: x.Type, + AdditionalProperties: x.AdditionalProperties, + Annotations: x.Annotations, Description: x.Description, - Structure: x.Structure, - Schema: x.Schema, + Folder: x.Folder, LinkedServiceName: x.LinkedServiceName, Parameters: x.Parameters, - Annotations: x.Annotations, - Folder: x.Folder, - AdditionalProperties: x.AdditionalProperties, + Schema: x.Schema, + Structure: x.Structure, + Type: x.Type, } } @@ -30227,26 +30077,26 @@ type XeroSource struct { // GetCopySource implements the CopySourceClassification interface for type XeroSource. 
func (x *XeroSource) GetCopySource() *CopySource { return &CopySource{ - Type: x.Type, + AdditionalProperties: x.AdditionalProperties, + DisableMetricsCollection: x.DisableMetricsCollection, + MaxConcurrentConnections: x.MaxConcurrentConnections, SourceRetryCount: x.SourceRetryCount, SourceRetryWait: x.SourceRetryWait, - MaxConcurrentConnections: x.MaxConcurrentConnections, - DisableMetricsCollection: x.DisableMetricsCollection, - AdditionalProperties: x.AdditionalProperties, + Type: x.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type XeroSource. func (x *XeroSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: x.QueryTimeout, AdditionalColumns: x.AdditionalColumns, - Type: x.Type, + AdditionalProperties: x.AdditionalProperties, + DisableMetricsCollection: x.DisableMetricsCollection, + MaxConcurrentConnections: x.MaxConcurrentConnections, + QueryTimeout: x.QueryTimeout, SourceRetryCount: x.SourceRetryCount, SourceRetryWait: x.SourceRetryWait, - MaxConcurrentConnections: x.MaxConcurrentConnections, - DisableMetricsCollection: x.DisableMetricsCollection, - AdditionalProperties: x.AdditionalProperties, + Type: x.Type, } } @@ -30277,12 +30127,12 @@ type ZendeskLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type ZendeskLinkedService. func (z *ZendeskLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: z.Type, + AdditionalProperties: z.AdditionalProperties, + Annotations: z.Annotations, ConnectVia: z.ConnectVia, Description: z.Description, Parameters: z.Parameters, - Annotations: z.Annotations, - AdditionalProperties: z.AdditionalProperties, + Type: z.Type, } } @@ -30298,8 +30148,8 @@ type ZendeskLinkedServiceTypeProperties struct { APIToken SecretBaseClassification // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The password of the Zendesk source. Password SecretBaseClassification @@ -30323,8 +30173,8 @@ type ZipDeflateReadSettings struct { // GetCompressionReadSettings implements the CompressionReadSettingsClassification interface for type ZipDeflateReadSettings. func (z *ZipDeflateReadSettings) GetCompressionReadSettings() *CompressionReadSettings { return &CompressionReadSettings{ - Type: z.Type, AdditionalProperties: z.AdditionalProperties, + Type: z.Type, } } @@ -30355,12 +30205,12 @@ type ZohoLinkedService struct { // GetLinkedService implements the LinkedServiceClassification interface for type ZohoLinkedService. func (z *ZohoLinkedService) GetLinkedService() *LinkedService { return &LinkedService{ - Type: z.Type, + AdditionalProperties: z.AdditionalProperties, + Annotations: z.Annotations, ConnectVia: z.ConnectVia, Description: z.Description, Parameters: z.Parameters, - Annotations: z.Annotations, - AdditionalProperties: z.AdditionalProperties, + Type: z.Type, } } @@ -30373,8 +30223,8 @@ type ZohoLinkedServiceTypeProperties struct { ConnectionProperties any // The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. - // Type: string (or Expression with resultType string). - EncryptedCredential any + // Type: string. + EncryptedCredential *string // The endpoint of the Zoho server. (i.e. 
crm.zoho.com/crm/private) Endpoint any @@ -30427,15 +30277,15 @@ type ZohoObjectDataset struct { // GetDataset implements the DatasetClassification interface for type ZohoObjectDataset. func (z *ZohoObjectDataset) GetDataset() *Dataset { return &Dataset{ - Type: z.Type, + AdditionalProperties: z.AdditionalProperties, + Annotations: z.Annotations, Description: z.Description, - Structure: z.Structure, - Schema: z.Schema, + Folder: z.Folder, LinkedServiceName: z.LinkedServiceName, Parameters: z.Parameters, - Annotations: z.Annotations, - Folder: z.Folder, - AdditionalProperties: z.AdditionalProperties, + Schema: z.Schema, + Structure: z.Structure, + Type: z.Type, } } @@ -30473,25 +30323,25 @@ type ZohoSource struct { // GetCopySource implements the CopySourceClassification interface for type ZohoSource. func (z *ZohoSource) GetCopySource() *CopySource { return &CopySource{ - Type: z.Type, + AdditionalProperties: z.AdditionalProperties, + DisableMetricsCollection: z.DisableMetricsCollection, + MaxConcurrentConnections: z.MaxConcurrentConnections, SourceRetryCount: z.SourceRetryCount, SourceRetryWait: z.SourceRetryWait, - MaxConcurrentConnections: z.MaxConcurrentConnections, - DisableMetricsCollection: z.DisableMetricsCollection, - AdditionalProperties: z.AdditionalProperties, + Type: z.Type, } } // GetTabularSource implements the TabularSourceClassification interface for type ZohoSource. func (z *ZohoSource) GetTabularSource() *TabularSource { return &TabularSource{ - QueryTimeout: z.QueryTimeout, AdditionalColumns: z.AdditionalColumns, - Type: z.Type, + AdditionalProperties: z.AdditionalProperties, + DisableMetricsCollection: z.DisableMetricsCollection, + MaxConcurrentConnections: z.MaxConcurrentConnections, + QueryTimeout: z.QueryTimeout, SourceRetryCount: z.SourceRetryCount, SourceRetryWait: z.SourceRetryWait, - MaxConcurrentConnections: z.MaxConcurrentConnections, - DisableMetricsCollection: z.DisableMetricsCollection, - AdditionalProperties: z.AdditionalProperties, + Type: z.Type, } } diff --git a/sdk/resourcemanager/datafactory/armdatafactory/models_serde.go b/sdk/resourcemanager/datafactory/armdatafactory/models_serde.go index 4cb5700bf4e6..08c55b5c5c09 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/models_serde.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/models_serde.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
package armdatafactory @@ -57,6 +56,8 @@ func (a Activity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", a.DependsOn) populate(objectMap, "description", a.Description) populate(objectMap, "name", a.Name) + populate(objectMap, "onInactiveMarkAs", a.OnInactiveMarkAs) + populate(objectMap, "state", a.State) objectMap["type"] = a.Type populate(objectMap, "userProperties", a.UserProperties) if a.AdditionalProperties != nil { @@ -85,6 +86,12 @@ func (a *Activity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &a.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &a.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &a.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &a.Type) delete(rawMsg, key) @@ -427,7 +434,7 @@ func (a *AmazonMWSLinkedService) UnmarshalJSON(data []byte) error { func (a AmazonMWSLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "accessKeyId", a.AccessKeyID) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populateAny(objectMap, "endpoint", a.Endpoint) populateAny(objectMap, "marketplaceID", a.MarketplaceID) populate(objectMap, "mwsAuthToken", a.MwsAuthToken) @@ -634,7 +641,7 @@ func (a *AmazonMWSSource) UnmarshalJSON(data []byte) error { func (a AmazonRdsForLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", a.ConnectionString) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "password", a.Password) return json.Marshal(objectMap) } @@ -1016,7 +1023,7 @@ func (a AmazonRdsForSQLServerLinkedServiceTypeProperties) MarshalJSON() ([]byte, objectMap := make(map[string]any) populate(objectMap, "alwaysEncryptedSettings", a.AlwaysEncryptedSettings) populateAny(objectMap, "connectionString", a.ConnectionString) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "password", a.Password) populateAny(objectMap, "userName", a.UserName) return json.Marshal(objectMap) @@ -1059,6 +1066,7 @@ func (a AmazonRdsForSQLServerSource) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "additionalColumns", a.AdditionalColumns) populateAny(objectMap, "disableMetricsCollection", a.DisableMetricsCollection) + populateAny(objectMap, "isolationLevel", a.IsolationLevel) populateAny(objectMap, "maxConcurrentConnections", a.MaxConcurrentConnections) populateAny(objectMap, "partitionOption", a.PartitionOption) populate(objectMap, "partitionSettings", a.PartitionSettings) @@ -1093,6 +1101,9 @@ func (a *AmazonRdsForSQLServerSource) UnmarshalJSON(data []byte) error { case "disableMetricsCollection": err = unpopulate(val, "DisableMetricsCollection", &a.DisableMetricsCollection) delete(rawMsg, key) + case "isolationLevel": + err = unpopulate(val, "IsolationLevel", &a.IsolationLevel) + delete(rawMsg, key) case "maxConcurrentConnections": err = unpopulate(val, "MaxConcurrentConnections", &a.MaxConcurrentConnections) delete(rawMsg, key) @@ -1315,7 +1326,7 @@ func (a *AmazonRedshiftLinkedService) UnmarshalJSON(data []byte) error { func (a 
AmazonRedshiftLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "database", a.Database) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "password", a.Password) populateAny(objectMap, "port", a.Port) populateAny(objectMap, "server", a.Server) @@ -1607,7 +1618,7 @@ func (a *AmazonS3CompatibleLinkedService) UnmarshalJSON(data []byte) error { func (a AmazonS3CompatibleLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "accessKeyId", a.AccessKeyID) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populateAny(objectMap, "forcePathStyle", a.ForcePathStyle) populate(objectMap, "secretAccessKey", a.SecretAccessKey) populateAny(objectMap, "serviceUrl", a.ServiceURL) @@ -1709,7 +1720,7 @@ func (a AmazonS3CompatibleReadSettings) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "deleteFilesAfterCompletion", a.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", a.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", a.FileListPath) populateAny(objectMap, "maxConcurrentConnections", a.MaxConcurrentConnections) populateAny(objectMap, "modifiedDatetimeEnd", a.ModifiedDatetimeEnd) @@ -1990,7 +2001,7 @@ func (a AmazonS3LinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "accessKeyId", a.AccessKeyID) populateAny(objectMap, "authenticationType", a.AuthenticationType) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "secretAccessKey", a.SecretAccessKey) populateAny(objectMap, "serviceUrl", a.ServiceURL) populate(objectMap, "sessionToken", a.SessionToken) @@ -2095,7 +2106,7 @@ func (a AmazonS3ReadSettings) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "deleteFilesAfterCompletion", a.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", a.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", a.FileListPath) populateAny(objectMap, "maxConcurrentConnections", a.MaxConcurrentConnections) populateAny(objectMap, "modifiedDatetimeEnd", a.ModifiedDatetimeEnd) @@ -2283,6 +2294,8 @@ func (a AppendVariableActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", a.DependsOn) populate(objectMap, "description", a.Description) populate(objectMap, "name", a.Name) + populate(objectMap, "onInactiveMarkAs", a.OnInactiveMarkAs) + populate(objectMap, "state", a.State) objectMap["type"] = "AppendVariable" populate(objectMap, "typeProperties", a.TypeProperties) populate(objectMap, "userProperties", a.UserProperties) @@ -2312,6 +2325,12 @@ func (a *AppendVariableActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &a.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", 
&a.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &a.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &a.Type) delete(rawMsg, key) @@ -2463,7 +2482,7 @@ func (a *AsanaLinkedService) UnmarshalJSON(data []byte) error { func (a AsanaLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "apiToken", a.APIToken) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) return json.Marshal(objectMap) } @@ -2974,7 +2993,7 @@ func (a AzureBatchLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "accountName", a.AccountName) populateAny(objectMap, "batchUri", a.BatchURI) populate(objectMap, "credential", a.Credential) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "linkedServiceName", a.LinkedServiceName) populateAny(objectMap, "poolName", a.PoolName) return json.Marshal(objectMap) @@ -3324,7 +3343,7 @@ func (a AzureBlobFSLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "accountKey", a.AccountKey) populateAny(objectMap, "azureCloudType", a.AzureCloudType) populate(objectMap, "credential", a.Credential) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "sasToken", a.SasToken) populateAny(objectMap, "sasUri", a.SasURI) populate(objectMap, "servicePrincipalCredential", a.ServicePrincipalCredential) @@ -3448,7 +3467,7 @@ func (a AzureBlobFSReadSettings) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "deleteFilesAfterCompletion", a.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", a.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", a.FileListPath) populateAny(objectMap, "maxConcurrentConnections", a.MaxConcurrentConnections) populateAny(objectMap, "modifiedDatetimeEnd", a.ModifiedDatetimeEnd) @@ -3797,7 +3816,7 @@ func (a *AzureBlobStorageLinkedService) UnmarshalJSON(data []byte) error { func (a AzureBlobStorageLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "accountKey", a.AccountKey) - populate(objectMap, "accountKind", a.AccountKind) + populateAny(objectMap, "accountKind", a.AccountKind) populate(objectMap, "authenticationType", a.AuthenticationType) populateAny(objectMap, "azureCloudType", a.AzureCloudType) populateAny(objectMap, "connectionString", a.ConnectionString) @@ -3806,7 +3825,7 @@ func (a AzureBlobStorageLinkedServiceTypeProperties) MarshalJSON() ([]byte, erro populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "sasToken", a.SasToken) populateAny(objectMap, "sasUri", a.SasURI) - populate(objectMap, "serviceEndpoint", a.ServiceEndpoint) + populateAny(objectMap, "serviceEndpoint", a.ServiceEndpoint) populateAny(objectMap, "servicePrincipalId", a.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", a.ServicePrincipalKey) populateAny(objectMap, "tenant", a.Tenant) @@ -3931,7 +3950,7 @@ func (a AzureBlobStorageReadSettings) MarshalJSON() ([]byte, error) { 
objectMap := make(map[string]any) populateAny(objectMap, "deleteFilesAfterCompletion", a.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", a.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", a.FileListPath) populateAny(objectMap, "maxConcurrentConnections", a.MaxConcurrentConnections) populateAny(objectMap, "modifiedDatetimeEnd", a.ModifiedDatetimeEnd) @@ -4081,7 +4100,9 @@ func (a AzureDataExplorerCommandActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", a.Description) populate(objectMap, "linkedServiceName", a.LinkedServiceName) populate(objectMap, "name", a.Name) + populate(objectMap, "onInactiveMarkAs", a.OnInactiveMarkAs) populate(objectMap, "policy", a.Policy) + populate(objectMap, "state", a.State) objectMap["type"] = "AzureDataExplorerCommand" populate(objectMap, "typeProperties", a.TypeProperties) populate(objectMap, "userProperties", a.UserProperties) @@ -4114,9 +4135,15 @@ func (a *AzureDataExplorerCommandActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &a.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &a.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &a.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &a.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &a.Type) delete(rawMsg, key) @@ -4604,7 +4631,7 @@ func (a AzureDataLakeAnalyticsLinkedServiceTypeProperties) MarshalJSON() ([]byte objectMap := make(map[string]any) populateAny(objectMap, "accountName", a.AccountName) populateAny(objectMap, "dataLakeAnalyticsUri", a.DataLakeAnalyticsURI) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populateAny(objectMap, "resourceGroupName", a.ResourceGroupName) populateAny(objectMap, "servicePrincipalId", a.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", a.ServicePrincipalKey) @@ -4836,7 +4863,7 @@ func (a AzureDataLakeStoreLinkedServiceTypeProperties) MarshalJSON() ([]byte, er populateAny(objectMap, "azureCloudType", a.AzureCloudType) populate(objectMap, "credential", a.Credential) populateAny(objectMap, "dataLakeStoreUri", a.DataLakeStoreURI) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populateAny(objectMap, "resourceGroupName", a.ResourceGroupName) populateAny(objectMap, "servicePrincipalId", a.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", a.ServicePrincipalKey) @@ -4947,7 +4974,7 @@ func (a AzureDataLakeStoreReadSettings) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "deleteFilesAfterCompletion", a.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", a.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", a.FileListPath) populateAny(objectMap, "listAfter", a.ListAfter) populateAny(objectMap, "listBefore", a.ListBefore) @@ -5644,7 +5671,7 @@ func (a AzureDatabricksDetltaLakeLinkedServiceTypeProperties) MarshalJSON() ([]b 
populateAny(objectMap, "clusterId", a.ClusterID) populate(objectMap, "credential", a.Credential) populateAny(objectMap, "domain", a.Domain) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populateAny(objectMap, "workspaceResourceId", a.WorkspaceResourceID) return json.Marshal(objectMap) } @@ -5753,7 +5780,7 @@ func (a AzureDatabricksLinkedServiceTypeProperties) MarshalJSON() ([]byte, error populateAny(objectMap, "authentication", a.Authentication) populate(objectMap, "credential", a.Credential) populateAny(objectMap, "domain", a.Domain) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populateAny(objectMap, "existingClusterId", a.ExistingClusterID) populateAny(objectMap, "instancePoolId", a.InstancePoolID) populate(objectMap, "newClusterCustomTags", a.NewClusterCustomTags) @@ -5912,7 +5939,7 @@ func (a AzureFileStorageLinkedServiceTypeProperties) MarshalJSON() ([]byte, erro objectMap := make(map[string]any) populate(objectMap, "accountKey", a.AccountKey) populateAny(objectMap, "connectionString", a.ConnectionString) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populateAny(objectMap, "fileShare", a.FileShare) populateAny(objectMap, "host", a.Host) populate(objectMap, "password", a.Password) @@ -6025,7 +6052,7 @@ func (a AzureFileStorageReadSettings) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "deleteFilesAfterCompletion", a.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", a.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", a.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", a.FileListPath) populateAny(objectMap, "maxConcurrentConnections", a.MaxConcurrentConnections) populateAny(objectMap, "modifiedDatetimeEnd", a.ModifiedDatetimeEnd) @@ -6171,7 +6198,9 @@ func (a AzureFunctionActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", a.Description) populate(objectMap, "linkedServiceName", a.LinkedServiceName) populate(objectMap, "name", a.Name) + populate(objectMap, "onInactiveMarkAs", a.OnInactiveMarkAs) populate(objectMap, "policy", a.Policy) + populate(objectMap, "state", a.State) objectMap["type"] = "AzureFunctionActivity" populate(objectMap, "typeProperties", a.TypeProperties) populate(objectMap, "userProperties", a.UserProperties) @@ -6204,9 +6233,15 @@ func (a *AzureFunctionActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &a.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &a.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &a.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &a.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &a.Type) delete(rawMsg, key) @@ -6340,7 +6375,7 @@ func (a AzureFunctionLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) objectMap := make(map[string]any) populateAny(objectMap, "authentication", a.Authentication) populate(objectMap, "credential", a.Credential) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) 
populateAny(objectMap, "functionAppUrl", a.FunctionAppURL) populate(objectMap, "functionKey", a.FunctionKey) populateAny(objectMap, "resourceId", a.ResourceID) @@ -6521,7 +6556,9 @@ func (a AzureMLBatchExecutionActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", a.Description) populate(objectMap, "linkedServiceName", a.LinkedServiceName) populate(objectMap, "name", a.Name) + populate(objectMap, "onInactiveMarkAs", a.OnInactiveMarkAs) populate(objectMap, "policy", a.Policy) + populate(objectMap, "state", a.State) objectMap["type"] = "AzureMLBatchExecution" populate(objectMap, "typeProperties", a.TypeProperties) populate(objectMap, "userProperties", a.UserProperties) @@ -6554,9 +6591,15 @@ func (a *AzureMLBatchExecutionActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &a.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &a.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &a.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &a.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &a.Type) delete(rawMsg, key) @@ -6626,7 +6669,9 @@ func (a AzureMLExecutePipelineActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", a.Description) populate(objectMap, "linkedServiceName", a.LinkedServiceName) populate(objectMap, "name", a.Name) + populate(objectMap, "onInactiveMarkAs", a.OnInactiveMarkAs) populate(objectMap, "policy", a.Policy) + populate(objectMap, "state", a.State) objectMap["type"] = "AzureMLExecutePipeline" populate(objectMap, "typeProperties", a.TypeProperties) populate(objectMap, "userProperties", a.UserProperties) @@ -6659,9 +6704,15 @@ func (a *AzureMLExecutePipelineActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &a.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &a.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &a.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &a.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &a.Type) delete(rawMsg, key) @@ -6811,7 +6862,7 @@ func (a AzureMLLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "apiKey", a.APIKey) populateAny(objectMap, "authentication", a.Authentication) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populateAny(objectMap, "mlEndpoint", a.MlEndpoint) populateAny(objectMap, "servicePrincipalId", a.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", a.ServicePrincipalKey) @@ -6926,7 +6977,8 @@ func (a *AzureMLServiceLinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type AzureMLServiceLinkedServiceTypeProperties. 
func (a AzureMLServiceLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populateAny(objectMap, "authentication", a.Authentication) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populateAny(objectMap, "mlWorkspaceName", a.MlWorkspaceName) populateAny(objectMap, "resourceGroupName", a.ResourceGroupName) populateAny(objectMap, "servicePrincipalId", a.ServicePrincipalID) @@ -6945,6 +6997,9 @@ func (a *AzureMLServiceLinkedServiceTypeProperties) UnmarshalJSON(data []byte) e for key, val := range rawMsg { var err error switch key { + case "authentication": + err = unpopulate(val, "Authentication", &a.Authentication) + delete(rawMsg, key) case "encryptedCredential": err = unpopulate(val, "EncryptedCredential", &a.EncryptedCredential) delete(rawMsg, key) @@ -6981,7 +7036,9 @@ func (a AzureMLUpdateResourceActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", a.Description) populate(objectMap, "linkedServiceName", a.LinkedServiceName) populate(objectMap, "name", a.Name) + populate(objectMap, "onInactiveMarkAs", a.OnInactiveMarkAs) populate(objectMap, "policy", a.Policy) + populate(objectMap, "state", a.State) objectMap["type"] = "AzureMLUpdateResource" populate(objectMap, "typeProperties", a.TypeProperties) populate(objectMap, "userProperties", a.UserProperties) @@ -7014,9 +7071,15 @@ func (a *AzureMLUpdateResourceActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &a.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &a.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &a.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &a.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &a.Type) delete(rawMsg, key) @@ -7176,7 +7239,7 @@ func (a *AzureMariaDBLinkedService) UnmarshalJSON(data []byte) error { func (a AzureMariaDBLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", a.ConnectionString) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "pwd", a.Pwd) return json.Marshal(objectMap) } @@ -7417,7 +7480,7 @@ func (a *AzureMySQLLinkedService) UnmarshalJSON(data []byte) error { func (a AzureMySQLLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", a.ConnectionString) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "password", a.Password) return json.Marshal(objectMap) } @@ -7759,7 +7822,7 @@ func (a *AzurePostgreSQLLinkedService) UnmarshalJSON(data []byte) error { func (a AzurePostgreSQLLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", a.ConnectionString) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "password", a.Password) return json.Marshal(objectMap) } @@ -8173,7 +8236,7 @@ func (a AzureSQLDWLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "azureCloudType", 
a.AzureCloudType) populateAny(objectMap, "connectionString", a.ConnectionString) populate(objectMap, "credential", a.Credential) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "password", a.Password) populateAny(objectMap, "servicePrincipalId", a.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", a.ServicePrincipalKey) @@ -8400,7 +8463,7 @@ func (a AzureSQLDatabaseLinkedServiceTypeProperties) MarshalJSON() ([]byte, erro populateAny(objectMap, "azureCloudType", a.AzureCloudType) populateAny(objectMap, "connectionString", a.ConnectionString) populate(objectMap, "credential", a.Credential) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "password", a.Password) populateAny(objectMap, "servicePrincipalId", a.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", a.ServicePrincipalKey) @@ -8521,7 +8584,7 @@ func (a AzureSQLMILinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "azureCloudType", a.AzureCloudType) populateAny(objectMap, "connectionString", a.ConnectionString) populate(objectMap, "credential", a.Credential) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "password", a.Password) populateAny(objectMap, "servicePrincipalId", a.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", a.ServicePrincipalKey) @@ -8789,6 +8852,7 @@ func (a AzureSQLSource) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "additionalColumns", a.AdditionalColumns) populateAny(objectMap, "disableMetricsCollection", a.DisableMetricsCollection) + populateAny(objectMap, "isolationLevel", a.IsolationLevel) populateAny(objectMap, "maxConcurrentConnections", a.MaxConcurrentConnections) populateAny(objectMap, "partitionOption", a.PartitionOption) populate(objectMap, "partitionSettings", a.PartitionSettings) @@ -8823,6 +8887,9 @@ func (a *AzureSQLSource) UnmarshalJSON(data []byte) error { case "disableMetricsCollection": err = unpopulate(val, "DisableMetricsCollection", &a.DisableMetricsCollection) delete(rawMsg, key) + case "isolationLevel": + err = unpopulate(val, "IsolationLevel", &a.IsolationLevel) + delete(rawMsg, key) case "maxConcurrentConnections": err = unpopulate(val, "MaxConcurrentConnections", &a.MaxConcurrentConnections) delete(rawMsg, key) @@ -9219,7 +9286,7 @@ func (a *AzureSearchLinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type AzureSearchLinkedServiceTypeProperties. 
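Many hunks in this file only swap populateAny for populate (or the reverse) to track that retyping. As a rough mental model, and only as a simplified sketch rather than the generated helpers themselves: populate is used for typed, nil-able fields and drops typed nil pointers, while populateAny is used for untyped `any` fields and only skips a literal nil interface value.

package main

import (
	"encoding/json"
	"fmt"
	"reflect"
)

// populate: simplified stand-in for the helper used with typed fields; it drops
// nil pointers/slices/maps so unset fields never reach the JSON payload.
func populate(m map[string]any, k string, v any) {
	if v == nil {
		return
	}
	if rv := reflect.ValueOf(v); (rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Slice || rv.Kind() == reflect.Map) && rv.IsNil() {
		return
	}
	m[k] = v
}

// populateAny: simplified stand-in for the helper used with untyped `any` fields;
// it only skips a nil interface value.
func populateAny(m map[string]any, k string, v any) {
	if v == nil {
		return
	}
	m[k] = v
}

func main() {
	objectMap := make(map[string]any)
	var cred *string // a retyped field, left unset
	populate(objectMap, "encryptedCredential", cred)
	populateAny(objectMap, "connectionString", "Server=example;Database=db")
	b, _ := json.Marshal(objectMap)
	fmt.Println(string(b)) // {"connectionString":"Server=example;Database=db"}
}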
func (a AzureSearchLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", a.EncryptedCredential) + populate(objectMap, "encryptedCredential", a.EncryptedCredential) populate(objectMap, "key", a.Key) populateAny(objectMap, "url", a.URL) return json.Marshal(objectMap) @@ -10541,7 +10608,7 @@ func (c *CassandraLinkedService) UnmarshalJSON(data []byte) error { func (c CassandraLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "authenticationType", c.AuthenticationType) - populateAny(objectMap, "encryptedCredential", c.EncryptedCredential) + populate(objectMap, "encryptedCredential", c.EncryptedCredential) populateAny(objectMap, "host", c.Host) populate(objectMap, "password", c.Password) populateAny(objectMap, "port", c.Port) @@ -10856,6 +10923,173 @@ func (c *ChainingTriggerTypeProperties) UnmarshalJSON(data []byte) error { return nil } +// MarshalJSON implements the json.Marshaller interface for type ChangeDataCapture. +func (c ChangeDataCapture) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "allowVNetOverride", c.AllowVNetOverride) + populate(objectMap, "description", c.Description) + populate(objectMap, "folder", c.Folder) + populate(objectMap, "policy", c.Policy) + populate(objectMap, "sourceConnectionsInfo", c.SourceConnectionsInfo) + populate(objectMap, "status", c.Status) + populate(objectMap, "targetConnectionsInfo", c.TargetConnectionsInfo) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type ChangeDataCapture. +func (c *ChangeDataCapture) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", c, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "allowVNetOverride": + err = unpopulate(val, "AllowVNetOverride", &c.AllowVNetOverride) + delete(rawMsg, key) + case "description": + err = unpopulate(val, "Description", &c.Description) + delete(rawMsg, key) + case "folder": + err = unpopulate(val, "Folder", &c.Folder) + delete(rawMsg, key) + case "policy": + err = unpopulate(val, "Policy", &c.Policy) + delete(rawMsg, key) + case "sourceConnectionsInfo": + err = unpopulate(val, "SourceConnectionsInfo", &c.SourceConnectionsInfo) + delete(rawMsg, key) + case "status": + err = unpopulate(val, "Status", &c.Status) + delete(rawMsg, key) + case "targetConnectionsInfo": + err = unpopulate(val, "TargetConnectionsInfo", &c.TargetConnectionsInfo) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", c, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type ChangeDataCaptureFolder. +func (c ChangeDataCaptureFolder) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "name", c.Name) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type ChangeDataCaptureFolder. 
+func (c *ChangeDataCaptureFolder) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", c, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "name": + err = unpopulate(val, "Name", &c.Name) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", c, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type ChangeDataCaptureListResponse. +func (c ChangeDataCaptureListResponse) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "nextLink", c.NextLink) + populate(objectMap, "value", c.Value) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type ChangeDataCaptureListResponse. +func (c *ChangeDataCaptureListResponse) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", c, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "nextLink": + err = unpopulate(val, "NextLink", &c.NextLink) + delete(rawMsg, key) + case "value": + err = unpopulate(val, "Value", &c.Value) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", c, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type ChangeDataCaptureResource. +func (c ChangeDataCaptureResource) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "etag", c.Etag) + populate(objectMap, "id", c.ID) + populate(objectMap, "name", c.Name) + populate(objectMap, "properties", c.Properties) + populate(objectMap, "type", c.Type) + if c.AdditionalProperties != nil { + for key, val := range c.AdditionalProperties { + objectMap[key] = val + } + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type ChangeDataCaptureResource. +func (c *ChangeDataCaptureResource) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", c, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "etag": + err = unpopulate(val, "Etag", &c.Etag) + delete(rawMsg, key) + case "id": + err = unpopulate(val, "ID", &c.ID) + delete(rawMsg, key) + case "name": + err = unpopulate(val, "Name", &c.Name) + delete(rawMsg, key) + case "properties": + err = unpopulate(val, "Properties", &c.Properties) + delete(rawMsg, key) + case "type": + err = unpopulate(val, "Type", &c.Type) + delete(rawMsg, key) + default: + if c.AdditionalProperties == nil { + c.AdditionalProperties = map[string]any{} + } + if val != nil { + var aux any + err = json.Unmarshal(val, &aux) + c.AdditionalProperties[key] = aux + } + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", c, err) + } + } + return nil +} + // MarshalJSON implements the json.Marshaller interface for type CmdkeySetup. 
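A minimal sketch of constructing and serializing the new change data capture resource described by the marshallers just added. The pointer types assumed here (Description, Status and Folder.Name as *string; Properties as *ChangeDataCapture) follow the populate pattern but are not spelled out in these hunks, and the import paths are the same assumptions as in the earlier sketch:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v2" // module path assumed
)

func main() {
	cdc := armdatafactory.ChangeDataCaptureResource{
		Name: to.Ptr("exampleChangeDataCapture"),
		Properties: &armdatafactory.ChangeDataCapture{
			Description: to.Ptr("Sample change data capture mapping"),
			Folder:      &armdatafactory.ChangeDataCaptureFolder{Name: to.Ptr("cdcFolder")},
			Status:      to.Ptr("Stopped"),
			// SourceConnectionsInfo and TargetConnectionsInfo are omitted to keep
			// the sketch short.
		},
	}
	b, err := json.Marshal(cdc)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}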
func (c CmdkeySetup) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) @@ -11090,7 +11324,7 @@ func (c CommonDataServiceForAppsLinkedServiceTypeProperties) MarshalJSON() ([]by objectMap := make(map[string]any) populateAny(objectMap, "authenticationType", c.AuthenticationType) populateAny(objectMap, "deploymentType", c.DeploymentType) - populateAny(objectMap, "encryptedCredential", c.EncryptedCredential) + populate(objectMap, "encryptedCredential", c.EncryptedCredential) populateAny(objectMap, "hostName", c.HostName) populateAny(objectMap, "organizationName", c.OrganizationName) populate(objectMap, "password", c.Password) @@ -11440,7 +11674,7 @@ func (c ConcurLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "clientId", c.ClientID) populateAny(objectMap, "connectionProperties", c.ConnectionProperties) - populateAny(objectMap, "encryptedCredential", c.EncryptedCredential) + populate(objectMap, "encryptedCredential", c.EncryptedCredential) populate(objectMap, "password", c.Password) populateAny(objectMap, "useEncryptedEndpoints", c.UseEncryptedEndpoints) populateAny(objectMap, "useHostVerification", c.UseHostVerification) @@ -11675,6 +11909,8 @@ func (c ControlActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", c.DependsOn) populate(objectMap, "description", c.Description) populate(objectMap, "name", c.Name) + populate(objectMap, "onInactiveMarkAs", c.OnInactiveMarkAs) + populate(objectMap, "state", c.State) objectMap["type"] = "Container" populate(objectMap, "userProperties", c.UserProperties) if c.AdditionalProperties != nil { @@ -11703,6 +11939,12 @@ func (c *ControlActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &c.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &c.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &c.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &c.Type) delete(rawMsg, key) @@ -11735,8 +11977,10 @@ func (c CopyActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "inputs", c.Inputs) populate(objectMap, "linkedServiceName", c.LinkedServiceName) populate(objectMap, "name", c.Name) + populate(objectMap, "onInactiveMarkAs", c.OnInactiveMarkAs) populate(objectMap, "outputs", c.Outputs) populate(objectMap, "policy", c.Policy) + populate(objectMap, "state", c.State) objectMap["type"] = "Copy" populate(objectMap, "typeProperties", c.TypeProperties) populate(objectMap, "userProperties", c.UserProperties) @@ -11772,12 +12016,18 @@ func (c *CopyActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &c.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &c.OnInactiveMarkAs) + delete(rawMsg, key) case "outputs": err = unpopulate(val, "Outputs", &c.Outputs) delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &c.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &c.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &c.Type) delete(rawMsg, key) @@ -12161,9 +12411,9 @@ func (c CosmosDbLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "connectionString", c.ConnectionString) populate(objectMap, "credential", c.Credential) populateAny(objectMap, "database", c.Database) - populateAny(objectMap, "encryptedCredential", c.EncryptedCredential) + 
populate(objectMap, "encryptedCredential", c.EncryptedCredential) populate(objectMap, "servicePrincipalCredential", c.ServicePrincipalCredential) - populate(objectMap, "servicePrincipalCredentialType", c.ServicePrincipalCredentialType) + populateAny(objectMap, "servicePrincipalCredentialType", c.ServicePrincipalCredentialType) populateAny(objectMap, "servicePrincipalId", c.ServicePrincipalID) populateAny(objectMap, "tenant", c.Tenant) return json.Marshal(objectMap) @@ -12884,7 +13134,7 @@ func (c CouchbaseLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", c.ConnectionString) populate(objectMap, "credString", c.CredString) - populateAny(objectMap, "encryptedCredential", c.EncryptedCredential) + populate(objectMap, "encryptedCredential", c.EncryptedCredential) return json.Marshal(objectMap) } @@ -13328,7 +13578,9 @@ func (c CustomActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", c.Description) populate(objectMap, "linkedServiceName", c.LinkedServiceName) populate(objectMap, "name", c.Name) + populate(objectMap, "onInactiveMarkAs", c.OnInactiveMarkAs) populate(objectMap, "policy", c.Policy) + populate(objectMap, "state", c.State) objectMap["type"] = "Custom" populate(objectMap, "typeProperties", c.TypeProperties) populate(objectMap, "userProperties", c.UserProperties) @@ -13361,9 +13613,15 @@ func (c *CustomActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &c.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &c.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &c.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &c.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &c.Type) delete(rawMsg, key) @@ -14486,7 +14744,9 @@ func (d DataLakeAnalyticsUSQLActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", d.Description) populate(objectMap, "linkedServiceName", d.LinkedServiceName) populate(objectMap, "name", d.Name) + populate(objectMap, "onInactiveMarkAs", d.OnInactiveMarkAs) populate(objectMap, "policy", d.Policy) + populate(objectMap, "state", d.State) objectMap["type"] = "DataLakeAnalyticsU-SQL" populate(objectMap, "typeProperties", d.TypeProperties) populate(objectMap, "userProperties", d.UserProperties) @@ -14519,9 +14779,15 @@ func (d *DataLakeAnalyticsUSQLActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &d.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &d.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &d.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &d.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &d.Type) delete(rawMsg, key) @@ -14600,6 +14866,49 @@ func (d *DataLakeAnalyticsUSQLActivityTypeProperties) UnmarshalJSON(data []byte) return nil } +// MarshalJSON implements the json.Marshaller interface for type DataMapperMapping. 
+func (d DataMapperMapping) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "attributeMappingInfo", d.AttributeMappingInfo) + populate(objectMap, "sourceConnectionReference", d.SourceConnectionReference) + populateAny(objectMap, "sourceDenormalizeInfo", d.SourceDenormalizeInfo) + populate(objectMap, "sourceEntityName", d.SourceEntityName) + populate(objectMap, "targetEntityName", d.TargetEntityName) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type DataMapperMapping. +func (d *DataMapperMapping) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", d, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "attributeMappingInfo": + err = unpopulate(val, "AttributeMappingInfo", &d.AttributeMappingInfo) + delete(rawMsg, key) + case "sourceConnectionReference": + err = unpopulate(val, "SourceConnectionReference", &d.SourceConnectionReference) + delete(rawMsg, key) + case "sourceDenormalizeInfo": + err = unpopulate(val, "SourceDenormalizeInfo", &d.SourceDenormalizeInfo) + delete(rawMsg, key) + case "sourceEntityName": + err = unpopulate(val, "SourceEntityName", &d.SourceEntityName) + delete(rawMsg, key) + case "targetEntityName": + err = unpopulate(val, "TargetEntityName", &d.TargetEntityName) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", d, err) + } + } + return nil +} + // MarshalJSON implements the json.Marshaller interface for type DatabricksNotebookActivity. func (d DatabricksNotebookActivity) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) @@ -14607,7 +14916,9 @@ func (d DatabricksNotebookActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", d.Description) populate(objectMap, "linkedServiceName", d.LinkedServiceName) populate(objectMap, "name", d.Name) + populate(objectMap, "onInactiveMarkAs", d.OnInactiveMarkAs) populate(objectMap, "policy", d.Policy) + populate(objectMap, "state", d.State) objectMap["type"] = "DatabricksNotebook" populate(objectMap, "typeProperties", d.TypeProperties) populate(objectMap, "userProperties", d.UserProperties) @@ -14640,9 +14951,15 @@ func (d *DatabricksNotebookActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &d.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &d.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &d.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &d.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &d.Type) delete(rawMsg, key) @@ -14712,7 +15029,9 @@ func (d DatabricksSparkJarActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", d.Description) populate(objectMap, "linkedServiceName", d.LinkedServiceName) populate(objectMap, "name", d.Name) + populate(objectMap, "onInactiveMarkAs", d.OnInactiveMarkAs) populate(objectMap, "policy", d.Policy) + populate(objectMap, "state", d.State) objectMap["type"] = "DatabricksSparkJar" populate(objectMap, "typeProperties", d.TypeProperties) populate(objectMap, "userProperties", d.UserProperties) @@ -14745,9 +15064,15 @@ func (d *DatabricksSparkJarActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &d.Name) delete(rawMsg, key) + case 
"onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &d.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &d.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &d.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &d.Type) delete(rawMsg, key) @@ -14817,7 +15142,9 @@ func (d DatabricksSparkPythonActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", d.Description) populate(objectMap, "linkedServiceName", d.LinkedServiceName) populate(objectMap, "name", d.Name) + populate(objectMap, "onInactiveMarkAs", d.OnInactiveMarkAs) populate(objectMap, "policy", d.Policy) + populate(objectMap, "state", d.State) objectMap["type"] = "DatabricksSparkPython" populate(objectMap, "typeProperties", d.TypeProperties) populate(objectMap, "userProperties", d.UserProperties) @@ -14850,9 +15177,15 @@ func (d *DatabricksSparkPythonActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &d.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &d.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &d.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &d.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &d.Type) delete(rawMsg, key) @@ -15364,7 +15697,7 @@ func (d *DataworldLinkedService) UnmarshalJSON(data []byte) error { func (d DataworldLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "apiToken", d.APIToken) - populateAny(objectMap, "encryptedCredential", d.EncryptedCredential) + populate(objectMap, "encryptedCredential", d.EncryptedCredential) return json.Marshal(objectMap) } @@ -15460,7 +15793,7 @@ func (d Db2LinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "certificateCommonName", d.CertificateCommonName) populateAny(objectMap, "connectionString", d.ConnectionString) populateAny(objectMap, "database", d.Database) - populateAny(objectMap, "encryptedCredential", d.EncryptedCredential) + populate(objectMap, "encryptedCredential", d.EncryptedCredential) populateAny(objectMap, "packageCollection", d.PackageCollection) populate(objectMap, "password", d.Password) populateAny(objectMap, "server", d.Server) @@ -15698,7 +16031,9 @@ func (d DeleteActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", d.Description) populate(objectMap, "linkedServiceName", d.LinkedServiceName) populate(objectMap, "name", d.Name) + populate(objectMap, "onInactiveMarkAs", d.OnInactiveMarkAs) populate(objectMap, "policy", d.Policy) + populate(objectMap, "state", d.State) objectMap["type"] = "Delete" populate(objectMap, "typeProperties", d.TypeProperties) populate(objectMap, "userProperties", d.UserProperties) @@ -15731,9 +16066,15 @@ func (d *DeleteActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &d.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &d.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &d.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &d.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &d.Type) delete(rawMsg, key) @@ -16636,7 +16977,7 @@ func (d *DrillLinkedService) UnmarshalJSON(data []byte) error { func (d DrillLinkedServiceTypeProperties) MarshalJSON() ([]byte, 
error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", d.ConnectionString) - populateAny(objectMap, "encryptedCredential", d.EncryptedCredential) + populate(objectMap, "encryptedCredential", d.EncryptedCredential) populate(objectMap, "pwd", d.Pwd) return json.Marshal(objectMap) } @@ -16877,7 +17218,7 @@ func (d *DynamicsAXLinkedService) UnmarshalJSON(data []byte) error { func (d DynamicsAXLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "aadResourceId", d.AADResourceID) - populateAny(objectMap, "encryptedCredential", d.EncryptedCredential) + populate(objectMap, "encryptedCredential", d.EncryptedCredential) populateAny(objectMap, "servicePrincipalId", d.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", d.ServicePrincipalKey) populateAny(objectMap, "tenant", d.Tenant) @@ -17263,7 +17604,7 @@ func (d DynamicsCrmLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "authenticationType", d.AuthenticationType) populateAny(objectMap, "deploymentType", d.DeploymentType) - populateAny(objectMap, "encryptedCredential", d.EncryptedCredential) + populate(objectMap, "encryptedCredential", d.EncryptedCredential) populateAny(objectMap, "hostName", d.HostName) populateAny(objectMap, "organizationName", d.OrganizationName) populate(objectMap, "password", d.Password) @@ -17642,7 +17983,7 @@ func (d DynamicsLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "authenticationType", d.AuthenticationType) populate(objectMap, "credential", d.Credential) populateAny(objectMap, "deploymentType", d.DeploymentType) - populateAny(objectMap, "encryptedCredential", d.EncryptedCredential) + populate(objectMap, "encryptedCredential", d.EncryptedCredential) populateAny(objectMap, "hostName", d.HostName) populateAny(objectMap, "organizationName", d.OrganizationName) populate(objectMap, "password", d.Password) @@ -17920,7 +18261,7 @@ func (e *EloquaLinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type EloquaLinkedServiceTypeProperties. 
func (e EloquaLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", e.EncryptedCredential) + populate(objectMap, "encryptedCredential", e.EncryptedCredential) populateAny(objectMap, "endpoint", e.Endpoint) populate(objectMap, "password", e.Password) populateAny(objectMap, "useEncryptedEndpoints", e.UseEncryptedEndpoints) @@ -18442,7 +18783,9 @@ func (e ExecuteDataFlowActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", e.Description) populate(objectMap, "linkedServiceName", e.LinkedServiceName) populate(objectMap, "name", e.Name) + populate(objectMap, "onInactiveMarkAs", e.OnInactiveMarkAs) populate(objectMap, "policy", e.Policy) + populate(objectMap, "state", e.State) objectMap["type"] = "ExecuteDataFlow" populate(objectMap, "typeProperties", e.TypeProperties) populate(objectMap, "userProperties", e.UserProperties) @@ -18475,9 +18818,15 @@ func (e *ExecuteDataFlowActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &e.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &e.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &e.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &e.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &e.Type) delete(rawMsg, key) @@ -18597,7 +18946,9 @@ func (e ExecutePipelineActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", e.DependsOn) populate(objectMap, "description", e.Description) populate(objectMap, "name", e.Name) + populate(objectMap, "onInactiveMarkAs", e.OnInactiveMarkAs) populate(objectMap, "policy", e.Policy) + populate(objectMap, "state", e.State) objectMap["type"] = "ExecutePipeline" populate(objectMap, "typeProperties", e.TypeProperties) populate(objectMap, "userProperties", e.UserProperties) @@ -18627,9 +18978,15 @@ func (e *ExecutePipelineActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &e.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &e.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &e.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &e.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &e.Type) delete(rawMsg, key) @@ -18804,7 +19161,9 @@ func (e ExecuteSSISPackageActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", e.Description) populate(objectMap, "linkedServiceName", e.LinkedServiceName) populate(objectMap, "name", e.Name) + populate(objectMap, "onInactiveMarkAs", e.OnInactiveMarkAs) populate(objectMap, "policy", e.Policy) + populate(objectMap, "state", e.State) objectMap["type"] = "ExecuteSSISPackage" populate(objectMap, "typeProperties", e.TypeProperties) populate(objectMap, "userProperties", e.UserProperties) @@ -18837,9 +19196,15 @@ func (e *ExecuteSSISPackageActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &e.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &e.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &e.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &e.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &e.Type) delete(rawMsg, key) @@ -18944,7 +19309,9 @@ func 
(e ExecuteWranglingDataflowActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", e.DependsOn) populate(objectMap, "description", e.Description) populate(objectMap, "name", e.Name) + populate(objectMap, "onInactiveMarkAs", e.OnInactiveMarkAs) populate(objectMap, "policy", e.Policy) + populate(objectMap, "state", e.State) objectMap["type"] = "ExecuteWranglingDataflow" populate(objectMap, "typeProperties", e.TypeProperties) populate(objectMap, "userProperties", e.UserProperties) @@ -18974,9 +19341,15 @@ func (e *ExecuteWranglingDataflowActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &e.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &e.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &e.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &e.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &e.Type) delete(rawMsg, key) @@ -19011,7 +19384,9 @@ func (e ExecutionActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", e.Description) populate(objectMap, "linkedServiceName", e.LinkedServiceName) populate(objectMap, "name", e.Name) + populate(objectMap, "onInactiveMarkAs", e.OnInactiveMarkAs) populate(objectMap, "policy", e.Policy) + populate(objectMap, "state", e.State) objectMap["type"] = "Execution" populate(objectMap, "userProperties", e.UserProperties) if e.AdditionalProperties != nil { @@ -19043,9 +19418,15 @@ func (e *ExecutionActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &e.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &e.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &e.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &e.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &e.Type) delete(rawMsg, key) @@ -19726,6 +20107,8 @@ func (f FailActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", f.DependsOn) populate(objectMap, "description", f.Description) populate(objectMap, "name", f.Name) + populate(objectMap, "onInactiveMarkAs", f.OnInactiveMarkAs) + populate(objectMap, "state", f.State) objectMap["type"] = "Fail" populate(objectMap, "typeProperties", f.TypeProperties) populate(objectMap, "userProperties", f.UserProperties) @@ -19755,6 +20138,12 @@ func (f *FailActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &f.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &f.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &f.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &f.Type) delete(rawMsg, key) @@ -19878,7 +20267,7 @@ func (f *FileServerLinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type FileServerLinkedServiceTypeProperties. 
func (f FileServerLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", f.EncryptedCredential) + populate(objectMap, "encryptedCredential", f.EncryptedCredential) populateAny(objectMap, "host", f.Host) populate(objectMap, "password", f.Password) populateAny(objectMap, "userId", f.UserID) @@ -19969,7 +20358,7 @@ func (f FileServerReadSettings) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "deleteFilesAfterCompletion", f.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", f.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", f.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", f.EnablePartitionDiscovery) populateAny(objectMap, "fileFilter", f.FileFilter) populateAny(objectMap, "fileListPath", f.FileListPath) populateAny(objectMap, "maxConcurrentConnections", f.MaxConcurrentConnections) @@ -20375,6 +20764,8 @@ func (f FilterActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", f.DependsOn) populate(objectMap, "description", f.Description) populate(objectMap, "name", f.Name) + populate(objectMap, "onInactiveMarkAs", f.OnInactiveMarkAs) + populate(objectMap, "state", f.State) objectMap["type"] = "Filter" populate(objectMap, "typeProperties", f.TypeProperties) populate(objectMap, "userProperties", f.UserProperties) @@ -20404,6 +20795,12 @@ func (f *FilterActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &f.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &f.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &f.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &f.Type) delete(rawMsg, key) @@ -20554,6 +20951,8 @@ func (f ForEachActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", f.DependsOn) populate(objectMap, "description", f.Description) populate(objectMap, "name", f.Name) + populate(objectMap, "onInactiveMarkAs", f.OnInactiveMarkAs) + populate(objectMap, "state", f.State) objectMap["type"] = "ForEach" populate(objectMap, "typeProperties", f.TypeProperties) populate(objectMap, "userProperties", f.UserProperties) @@ -20583,6 +20982,12 @@ func (f *ForEachActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &f.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &f.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &f.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &f.Type) delete(rawMsg, key) @@ -20739,13 +21144,13 @@ func (f FtpReadSettings) MarshalJSON() ([]byte, error) { populateAny(objectMap, "deleteFilesAfterCompletion", f.DeleteFilesAfterCompletion) populateAny(objectMap, "disableChunking", f.DisableChunking) populateAny(objectMap, "disableMetricsCollection", f.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", f.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", f.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", f.FileListPath) populateAny(objectMap, "maxConcurrentConnections", f.MaxConcurrentConnections) populateAny(objectMap, "partitionRootPath", f.PartitionRootPath) populateAny(objectMap, "recursive", f.Recursive) objectMap["type"] = "FtpReadSettings" - populate(objectMap, 
"useBinaryTransfer", f.UseBinaryTransfer) + populateAny(objectMap, "useBinaryTransfer", f.UseBinaryTransfer) populateAny(objectMap, "wildcardFileName", f.WildcardFileName) populateAny(objectMap, "wildcardFolderPath", f.WildcardFolderPath) if f.AdditionalProperties != nil { @@ -20887,7 +21292,7 @@ func (f FtpServerLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populate(objectMap, "authenticationType", f.AuthenticationType) populateAny(objectMap, "enableSsl", f.EnableSSL) populateAny(objectMap, "enableServerCertificateValidation", f.EnableServerCertificateValidation) - populateAny(objectMap, "encryptedCredential", f.EncryptedCredential) + populate(objectMap, "encryptedCredential", f.EncryptedCredential) populateAny(objectMap, "host", f.Host) populate(objectMap, "password", f.Password) populateAny(objectMap, "port", f.Port) @@ -21020,7 +21425,9 @@ func (g GetMetadataActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", g.Description) populate(objectMap, "linkedServiceName", g.LinkedServiceName) populate(objectMap, "name", g.Name) + populate(objectMap, "onInactiveMarkAs", g.OnInactiveMarkAs) populate(objectMap, "policy", g.Policy) + populate(objectMap, "state", g.State) objectMap["type"] = "GetMetadata" populate(objectMap, "typeProperties", g.TypeProperties) populate(objectMap, "userProperties", g.UserProperties) @@ -21053,9 +21460,15 @@ func (g *GetMetadataActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &g.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &g.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &g.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &g.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &g.Type) delete(rawMsg, key) @@ -21423,9 +21836,13 @@ func (g GoogleAdWordsLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) populateAny(objectMap, "connectionProperties", g.ConnectionProperties) populate(objectMap, "developerToken", g.DeveloperToken) populateAny(objectMap, "email", g.Email) - populateAny(objectMap, "encryptedCredential", g.EncryptedCredential) + populate(objectMap, "encryptedCredential", g.EncryptedCredential) + populateAny(objectMap, "googleAdsApiVersion", g.GoogleAdsAPIVersion) populateAny(objectMap, "keyFilePath", g.KeyFilePath) + populateAny(objectMap, "loginCustomerID", g.LoginCustomerID) + populate(objectMap, "privateKey", g.PrivateKey) populate(objectMap, "refreshToken", g.RefreshToken) + populateAny(objectMap, "supportLegacyDataTypes", g.SupportLegacyDataTypes) populateAny(objectMap, "trustedCertPath", g.TrustedCertPath) populateAny(objectMap, "useSystemTrustStore", g.UseSystemTrustStore) return json.Marshal(objectMap) @@ -21464,12 +21881,24 @@ func (g *GoogleAdWordsLinkedServiceTypeProperties) UnmarshalJSON(data []byte) er case "encryptedCredential": err = unpopulate(val, "EncryptedCredential", &g.EncryptedCredential) delete(rawMsg, key) + case "googleAdsApiVersion": + err = unpopulate(val, "GoogleAdsAPIVersion", &g.GoogleAdsAPIVersion) + delete(rawMsg, key) case "keyFilePath": err = unpopulate(val, "KeyFilePath", &g.KeyFilePath) delete(rawMsg, key) + case "loginCustomerID": + err = unpopulate(val, "LoginCustomerID", &g.LoginCustomerID) + delete(rawMsg, key) + case "privateKey": + g.PrivateKey, err = unmarshalSecretBaseClassification(val) + delete(rawMsg, key) case "refreshToken": g.RefreshToken, err = unmarshalSecretBaseClassification(val) 
delete(rawMsg, key) + case "supportLegacyDataTypes": + err = unpopulate(val, "SupportLegacyDataTypes", &g.SupportLegacyDataTypes) + delete(rawMsg, key) case "trustedCertPath": err = unpopulate(val, "TrustedCertPath", &g.TrustedCertPath) delete(rawMsg, key) @@ -21733,7 +22162,7 @@ func (g GoogleBigQueryLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) populateAny(objectMap, "clientId", g.ClientID) populate(objectMap, "clientSecret", g.ClientSecret) populateAny(objectMap, "email", g.Email) - populateAny(objectMap, "encryptedCredential", g.EncryptedCredential) + populate(objectMap, "encryptedCredential", g.EncryptedCredential) populateAny(objectMap, "keyFilePath", g.KeyFilePath) populateAny(objectMap, "project", g.Project) populate(objectMap, "refreshToken", g.RefreshToken) @@ -22006,7 +22435,7 @@ func (g *GoogleCloudStorageLinkedService) UnmarshalJSON(data []byte) error { func (g GoogleCloudStorageLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "accessKeyId", g.AccessKeyID) - populateAny(objectMap, "encryptedCredential", g.EncryptedCredential) + populate(objectMap, "encryptedCredential", g.EncryptedCredential) populate(objectMap, "secretAccessKey", g.SecretAccessKey) populateAny(objectMap, "serviceUrl", g.ServiceURL) return json.Marshal(objectMap) @@ -22104,7 +22533,7 @@ func (g GoogleCloudStorageReadSettings) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "deleteFilesAfterCompletion", g.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", g.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", g.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", g.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", g.FileListPath) populateAny(objectMap, "maxConcurrentConnections", g.MaxConcurrentConnections) populateAny(objectMap, "modifiedDatetimeEnd", g.ModifiedDatetimeEnd) @@ -22255,7 +22684,7 @@ func (g *GoogleSheetsLinkedService) UnmarshalJSON(data []byte) error { func (g GoogleSheetsLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "apiToken", g.APIToken) - populateAny(objectMap, "encryptedCredential", g.EncryptedCredential) + populate(objectMap, "encryptedCredential", g.EncryptedCredential) return json.Marshal(objectMap) } @@ -22383,7 +22812,7 @@ func (g *GreenplumLinkedService) UnmarshalJSON(data []byte) error { func (g GreenplumLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", g.ConnectionString) - populateAny(objectMap, "encryptedCredential", g.EncryptedCredential) + populate(objectMap, "encryptedCredential", g.EncryptedCredential) populate(objectMap, "pwd", g.Pwd) return json.Marshal(objectMap) } @@ -22627,7 +23056,7 @@ func (h HBaseLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "allowSelfSignedServerCert", h.AllowSelfSignedServerCert) populate(objectMap, "authenticationType", h.AuthenticationType) populateAny(objectMap, "enableSsl", h.EnableSSL) - populateAny(objectMap, "encryptedCredential", h.EncryptedCredential) + populate(objectMap, "encryptedCredential", h.EncryptedCredential) populateAny(objectMap, "httpPath", h.HTTPPath) populateAny(objectMap, "host", h.Host) populate(objectMap, "password", h.Password) @@ -22838,7 +23267,9 @@ func (h HDInsightHiveActivity) MarshalJSON() ([]byte, error) { 
populate(objectMap, "description", h.Description) populate(objectMap, "linkedServiceName", h.LinkedServiceName) populate(objectMap, "name", h.Name) + populate(objectMap, "onInactiveMarkAs", h.OnInactiveMarkAs) populate(objectMap, "policy", h.Policy) + populate(objectMap, "state", h.State) objectMap["type"] = "HDInsightHive" populate(objectMap, "typeProperties", h.TypeProperties) populate(objectMap, "userProperties", h.UserProperties) @@ -22871,9 +23302,15 @@ func (h *HDInsightHiveActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &h.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &h.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &h.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &h.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &h.Type) delete(rawMsg, key) @@ -23022,7 +23459,7 @@ func (h *HDInsightLinkedService) UnmarshalJSON(data []byte) error { func (h HDInsightLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "clusterUri", h.ClusterURI) - populateAny(objectMap, "encryptedCredential", h.EncryptedCredential) + populate(objectMap, "encryptedCredential", h.EncryptedCredential) populateAny(objectMap, "fileSystem", h.FileSystem) populate(objectMap, "hcatalogLinkedServiceName", h.HcatalogLinkedServiceName) populateAny(objectMap, "isEspEnabled", h.IsEspEnabled) @@ -23080,7 +23517,9 @@ func (h HDInsightMapReduceActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", h.Description) populate(objectMap, "linkedServiceName", h.LinkedServiceName) populate(objectMap, "name", h.Name) + populate(objectMap, "onInactiveMarkAs", h.OnInactiveMarkAs) populate(objectMap, "policy", h.Policy) + populate(objectMap, "state", h.State) objectMap["type"] = "HDInsightMapReduce" populate(objectMap, "typeProperties", h.TypeProperties) populate(objectMap, "userProperties", h.UserProperties) @@ -23113,9 +23552,15 @@ func (h *HDInsightMapReduceActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &h.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &h.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &h.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &h.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &h.Type) delete(rawMsg, key) @@ -23275,7 +23720,7 @@ func (h HDInsightOnDemandLinkedServiceTypeProperties) MarshalJSON() ([]byte, err populateAny(objectMap, "coreConfiguration", h.CoreConfiguration) populate(objectMap, "credential", h.Credential) populateAny(objectMap, "dataNodeSize", h.DataNodeSize) - populateAny(objectMap, "encryptedCredential", h.EncryptedCredential) + populate(objectMap, "encryptedCredential", h.EncryptedCredential) populateAny(objectMap, "hBaseConfiguration", h.HBaseConfiguration) populate(objectMap, "hcatalogLinkedServiceName", h.HcatalogLinkedServiceName) populateAny(objectMap, "hdfsConfiguration", h.HdfsConfiguration) @@ -23426,7 +23871,9 @@ func (h HDInsightPigActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", h.Description) populate(objectMap, "linkedServiceName", h.LinkedServiceName) populate(objectMap, "name", h.Name) + populate(objectMap, "onInactiveMarkAs", h.OnInactiveMarkAs) populate(objectMap, "policy", h.Policy) + populate(objectMap, 
"state", h.State) objectMap["type"] = "HDInsightPig" populate(objectMap, "typeProperties", h.TypeProperties) populate(objectMap, "userProperties", h.UserProperties) @@ -23459,250 +23906,272 @@ func (h *HDInsightPigActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &h.Name) delete(rawMsg, key) - case "policy": - err = unpopulate(val, "Policy", &h.Policy) - delete(rawMsg, key) - case "type": - err = unpopulate(val, "Type", &h.Type) - delete(rawMsg, key) - case "typeProperties": - err = unpopulate(val, "TypeProperties", &h.TypeProperties) - delete(rawMsg, key) - case "userProperties": - err = unpopulate(val, "UserProperties", &h.UserProperties) - delete(rawMsg, key) - default: - if h.AdditionalProperties == nil { - h.AdditionalProperties = map[string]any{} - } - if val != nil { - var aux any - err = json.Unmarshal(val, &aux) - h.AdditionalProperties[key] = aux - } - delete(rawMsg, key) - } - if err != nil { - return fmt.Errorf("unmarshalling type %T: %v", h, err) - } - } - return nil -} - -// MarshalJSON implements the json.Marshaller interface for type HDInsightPigActivityTypeProperties. -func (h HDInsightPigActivityTypeProperties) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]any) - populateAny(objectMap, "arguments", h.Arguments) - populate(objectMap, "defines", h.Defines) - populate(objectMap, "getDebugInfo", h.GetDebugInfo) - populate(objectMap, "scriptLinkedService", h.ScriptLinkedService) - populateAny(objectMap, "scriptPath", h.ScriptPath) - populate(objectMap, "storageLinkedServices", h.StorageLinkedServices) - return json.Marshal(objectMap) -} - -// UnmarshalJSON implements the json.Unmarshaller interface for type HDInsightPigActivityTypeProperties. -func (h *HDInsightPigActivityTypeProperties) UnmarshalJSON(data []byte) error { - var rawMsg map[string]json.RawMessage - if err := json.Unmarshal(data, &rawMsg); err != nil { - return fmt.Errorf("unmarshalling type %T: %v", h, err) - } - for key, val := range rawMsg { - var err error - switch key { - case "arguments": - err = unpopulate(val, "Arguments", &h.Arguments) - delete(rawMsg, key) - case "defines": - err = unpopulate(val, "Defines", &h.Defines) - delete(rawMsg, key) - case "getDebugInfo": - err = unpopulate(val, "GetDebugInfo", &h.GetDebugInfo) - delete(rawMsg, key) - case "scriptLinkedService": - err = unpopulate(val, "ScriptLinkedService", &h.ScriptLinkedService) - delete(rawMsg, key) - case "scriptPath": - err = unpopulate(val, "ScriptPath", &h.ScriptPath) - delete(rawMsg, key) - case "storageLinkedServices": - err = unpopulate(val, "StorageLinkedServices", &h.StorageLinkedServices) - delete(rawMsg, key) - } - if err != nil { - return fmt.Errorf("unmarshalling type %T: %v", h, err) - } - } - return nil -} - -// MarshalJSON implements the json.Marshaller interface for type HDInsightSparkActivity. 
-func (h HDInsightSparkActivity) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]any) - populate(objectMap, "dependsOn", h.DependsOn) - populate(objectMap, "description", h.Description) - populate(objectMap, "linkedServiceName", h.LinkedServiceName) - populate(objectMap, "name", h.Name) - populate(objectMap, "policy", h.Policy) - objectMap["type"] = "HDInsightSpark" - populate(objectMap, "typeProperties", h.TypeProperties) - populate(objectMap, "userProperties", h.UserProperties) - if h.AdditionalProperties != nil { - for key, val := range h.AdditionalProperties { - objectMap[key] = val - } - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON implements the json.Unmarshaller interface for type HDInsightSparkActivity. -func (h *HDInsightSparkActivity) UnmarshalJSON(data []byte) error { - var rawMsg map[string]json.RawMessage - if err := json.Unmarshal(data, &rawMsg); err != nil { - return fmt.Errorf("unmarshalling type %T: %v", h, err) - } - for key, val := range rawMsg { - var err error - switch key { - case "dependsOn": - err = unpopulate(val, "DependsOn", &h.DependsOn) - delete(rawMsg, key) - case "description": - err = unpopulate(val, "Description", &h.Description) - delete(rawMsg, key) - case "linkedServiceName": - err = unpopulate(val, "LinkedServiceName", &h.LinkedServiceName) - delete(rawMsg, key) - case "name": - err = unpopulate(val, "Name", &h.Name) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &h.OnInactiveMarkAs) delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &h.Policy) delete(rawMsg, key) - case "type": - err = unpopulate(val, "Type", &h.Type) - delete(rawMsg, key) - case "typeProperties": - err = unpopulate(val, "TypeProperties", &h.TypeProperties) - delete(rawMsg, key) - case "userProperties": - err = unpopulate(val, "UserProperties", &h.UserProperties) - delete(rawMsg, key) - default: - if h.AdditionalProperties == nil { - h.AdditionalProperties = map[string]any{} - } - if val != nil { - var aux any - err = json.Unmarshal(val, &aux) - h.AdditionalProperties[key] = aux - } - delete(rawMsg, key) - } - if err != nil { - return fmt.Errorf("unmarshalling type %T: %v", h, err) - } - } - return nil -} - -// MarshalJSON implements the json.Marshaller interface for type HDInsightSparkActivityTypeProperties. -func (h HDInsightSparkActivityTypeProperties) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]any) - populate(objectMap, "arguments", h.Arguments) - populate(objectMap, "className", h.ClassName) - populateAny(objectMap, "entryFilePath", h.EntryFilePath) - populate(objectMap, "getDebugInfo", h.GetDebugInfo) - populateAny(objectMap, "proxyUser", h.ProxyUser) - populateAny(objectMap, "rootPath", h.RootPath) - populate(objectMap, "sparkConfig", h.SparkConfig) - populate(objectMap, "sparkJobLinkedService", h.SparkJobLinkedService) - return json.Marshal(objectMap) -} - -// UnmarshalJSON implements the json.Unmarshaller interface for type HDInsightSparkActivityTypeProperties. 
-func (h *HDInsightSparkActivityTypeProperties) UnmarshalJSON(data []byte) error { - var rawMsg map[string]json.RawMessage - if err := json.Unmarshal(data, &rawMsg); err != nil { - return fmt.Errorf("unmarshalling type %T: %v", h, err) - } - for key, val := range rawMsg { - var err error - switch key { - case "arguments": - err = unpopulate(val, "Arguments", &h.Arguments) - delete(rawMsg, key) - case "className": - err = unpopulate(val, "ClassName", &h.ClassName) - delete(rawMsg, key) - case "entryFilePath": - err = unpopulate(val, "EntryFilePath", &h.EntryFilePath) - delete(rawMsg, key) - case "getDebugInfo": - err = unpopulate(val, "GetDebugInfo", &h.GetDebugInfo) - delete(rawMsg, key) - case "proxyUser": - err = unpopulate(val, "ProxyUser", &h.ProxyUser) - delete(rawMsg, key) - case "rootPath": - err = unpopulate(val, "RootPath", &h.RootPath) - delete(rawMsg, key) - case "sparkConfig": - err = unpopulate(val, "SparkConfig", &h.SparkConfig) - delete(rawMsg, key) - case "sparkJobLinkedService": - err = unpopulate(val, "SparkJobLinkedService", &h.SparkJobLinkedService) - delete(rawMsg, key) - } - if err != nil { - return fmt.Errorf("unmarshalling type %T: %v", h, err) - } - } - return nil -} - -// MarshalJSON implements the json.Marshaller interface for type HDInsightStreamingActivity. -func (h HDInsightStreamingActivity) MarshalJSON() ([]byte, error) { - objectMap := make(map[string]any) - populate(objectMap, "dependsOn", h.DependsOn) - populate(objectMap, "description", h.Description) - populate(objectMap, "linkedServiceName", h.LinkedServiceName) - populate(objectMap, "name", h.Name) - populate(objectMap, "policy", h.Policy) - objectMap["type"] = "HDInsightStreaming" - populate(objectMap, "typeProperties", h.TypeProperties) - populate(objectMap, "userProperties", h.UserProperties) - if h.AdditionalProperties != nil { - for key, val := range h.AdditionalProperties { - objectMap[key] = val - } - } - return json.Marshal(objectMap) -} - -// UnmarshalJSON implements the json.Unmarshaller interface for type HDInsightStreamingActivity. 
-func (h *HDInsightStreamingActivity) UnmarshalJSON(data []byte) error { - var rawMsg map[string]json.RawMessage - if err := json.Unmarshal(data, &rawMsg); err != nil { - return fmt.Errorf("unmarshalling type %T: %v", h, err) - } - for key, val := range rawMsg { - var err error - switch key { - case "dependsOn": - err = unpopulate(val, "DependsOn", &h.DependsOn) - delete(rawMsg, key) - case "description": - err = unpopulate(val, "Description", &h.Description) - delete(rawMsg, key) - case "linkedServiceName": - err = unpopulate(val, "LinkedServiceName", &h.LinkedServiceName) - delete(rawMsg, key) - case "name": - err = unpopulate(val, "Name", &h.Name) - delete(rawMsg, key) - case "policy": - err = unpopulate(val, "Policy", &h.Policy) + case "state": + err = unpopulate(val, "State", &h.State) + delete(rawMsg, key) + case "type": + err = unpopulate(val, "Type", &h.Type) + delete(rawMsg, key) + case "typeProperties": + err = unpopulate(val, "TypeProperties", &h.TypeProperties) + delete(rawMsg, key) + case "userProperties": + err = unpopulate(val, "UserProperties", &h.UserProperties) + delete(rawMsg, key) + default: + if h.AdditionalProperties == nil { + h.AdditionalProperties = map[string]any{} + } + if val != nil { + var aux any + err = json.Unmarshal(val, &aux) + h.AdditionalProperties[key] = aux + } + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", h, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type HDInsightPigActivityTypeProperties. +func (h HDInsightPigActivityTypeProperties) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populateAny(objectMap, "arguments", h.Arguments) + populate(objectMap, "defines", h.Defines) + populate(objectMap, "getDebugInfo", h.GetDebugInfo) + populate(objectMap, "scriptLinkedService", h.ScriptLinkedService) + populateAny(objectMap, "scriptPath", h.ScriptPath) + populate(objectMap, "storageLinkedServices", h.StorageLinkedServices) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type HDInsightPigActivityTypeProperties. +func (h *HDInsightPigActivityTypeProperties) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", h, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "arguments": + err = unpopulate(val, "Arguments", &h.Arguments) + delete(rawMsg, key) + case "defines": + err = unpopulate(val, "Defines", &h.Defines) + delete(rawMsg, key) + case "getDebugInfo": + err = unpopulate(val, "GetDebugInfo", &h.GetDebugInfo) + delete(rawMsg, key) + case "scriptLinkedService": + err = unpopulate(val, "ScriptLinkedService", &h.ScriptLinkedService) + delete(rawMsg, key) + case "scriptPath": + err = unpopulate(val, "ScriptPath", &h.ScriptPath) + delete(rawMsg, key) + case "storageLinkedServices": + err = unpopulate(val, "StorageLinkedServices", &h.StorageLinkedServices) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", h, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type HDInsightSparkActivity. 
+func (h HDInsightSparkActivity) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "dependsOn", h.DependsOn) + populate(objectMap, "description", h.Description) + populate(objectMap, "linkedServiceName", h.LinkedServiceName) + populate(objectMap, "name", h.Name) + populate(objectMap, "onInactiveMarkAs", h.OnInactiveMarkAs) + populate(objectMap, "policy", h.Policy) + populate(objectMap, "state", h.State) + objectMap["type"] = "HDInsightSpark" + populate(objectMap, "typeProperties", h.TypeProperties) + populate(objectMap, "userProperties", h.UserProperties) + if h.AdditionalProperties != nil { + for key, val := range h.AdditionalProperties { + objectMap[key] = val + } + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type HDInsightSparkActivity. +func (h *HDInsightSparkActivity) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", h, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "dependsOn": + err = unpopulate(val, "DependsOn", &h.DependsOn) + delete(rawMsg, key) + case "description": + err = unpopulate(val, "Description", &h.Description) + delete(rawMsg, key) + case "linkedServiceName": + err = unpopulate(val, "LinkedServiceName", &h.LinkedServiceName) + delete(rawMsg, key) + case "name": + err = unpopulate(val, "Name", &h.Name) + delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &h.OnInactiveMarkAs) + delete(rawMsg, key) + case "policy": + err = unpopulate(val, "Policy", &h.Policy) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &h.State) + delete(rawMsg, key) + case "type": + err = unpopulate(val, "Type", &h.Type) + delete(rawMsg, key) + case "typeProperties": + err = unpopulate(val, "TypeProperties", &h.TypeProperties) + delete(rawMsg, key) + case "userProperties": + err = unpopulate(val, "UserProperties", &h.UserProperties) + delete(rawMsg, key) + default: + if h.AdditionalProperties == nil { + h.AdditionalProperties = map[string]any{} + } + if val != nil { + var aux any + err = json.Unmarshal(val, &aux) + h.AdditionalProperties[key] = aux + } + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", h, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type HDInsightSparkActivityTypeProperties. +func (h HDInsightSparkActivityTypeProperties) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "arguments", h.Arguments) + populate(objectMap, "className", h.ClassName) + populateAny(objectMap, "entryFilePath", h.EntryFilePath) + populate(objectMap, "getDebugInfo", h.GetDebugInfo) + populateAny(objectMap, "proxyUser", h.ProxyUser) + populateAny(objectMap, "rootPath", h.RootPath) + populate(objectMap, "sparkConfig", h.SparkConfig) + populate(objectMap, "sparkJobLinkedService", h.SparkJobLinkedService) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type HDInsightSparkActivityTypeProperties. 
+func (h *HDInsightSparkActivityTypeProperties) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", h, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "arguments": + err = unpopulate(val, "Arguments", &h.Arguments) + delete(rawMsg, key) + case "className": + err = unpopulate(val, "ClassName", &h.ClassName) + delete(rawMsg, key) + case "entryFilePath": + err = unpopulate(val, "EntryFilePath", &h.EntryFilePath) + delete(rawMsg, key) + case "getDebugInfo": + err = unpopulate(val, "GetDebugInfo", &h.GetDebugInfo) + delete(rawMsg, key) + case "proxyUser": + err = unpopulate(val, "ProxyUser", &h.ProxyUser) + delete(rawMsg, key) + case "rootPath": + err = unpopulate(val, "RootPath", &h.RootPath) + delete(rawMsg, key) + case "sparkConfig": + err = unpopulate(val, "SparkConfig", &h.SparkConfig) + delete(rawMsg, key) + case "sparkJobLinkedService": + err = unpopulate(val, "SparkJobLinkedService", &h.SparkJobLinkedService) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", h, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type HDInsightStreamingActivity. +func (h HDInsightStreamingActivity) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "dependsOn", h.DependsOn) + populate(objectMap, "description", h.Description) + populate(objectMap, "linkedServiceName", h.LinkedServiceName) + populate(objectMap, "name", h.Name) + populate(objectMap, "onInactiveMarkAs", h.OnInactiveMarkAs) + populate(objectMap, "policy", h.Policy) + populate(objectMap, "state", h.State) + objectMap["type"] = "HDInsightStreaming" + populate(objectMap, "typeProperties", h.TypeProperties) + populate(objectMap, "userProperties", h.UserProperties) + if h.AdditionalProperties != nil { + for key, val := range h.AdditionalProperties { + objectMap[key] = val + } + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type HDInsightStreamingActivity. 
+func (h *HDInsightStreamingActivity) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", h, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "dependsOn": + err = unpopulate(val, "DependsOn", &h.DependsOn) + delete(rawMsg, key) + case "description": + err = unpopulate(val, "Description", &h.Description) + delete(rawMsg, key) + case "linkedServiceName": + err = unpopulate(val, "LinkedServiceName", &h.LinkedServiceName) + delete(rawMsg, key) + case "name": + err = unpopulate(val, "Name", &h.Name) + delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &h.OnInactiveMarkAs) + delete(rawMsg, key) + case "policy": + err = unpopulate(val, "Policy", &h.Policy) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &h.State) delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &h.Type) @@ -23993,7 +24462,7 @@ func (h HTTPLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "certThumbprint", h.CertThumbprint) populateAny(objectMap, "embeddedCertData", h.EmbeddedCertData) populateAny(objectMap, "enableServerCertificateValidation", h.EnableServerCertificateValidation) - populateAny(objectMap, "encryptedCredential", h.EncryptedCredential) + populate(objectMap, "encryptedCredential", h.EncryptedCredential) populate(objectMap, "password", h.Password) populateAny(objectMap, "url", h.URL) populateAny(objectMap, "userName", h.UserName) @@ -24047,11 +24516,10 @@ func (h *HTTPLinkedServiceTypeProperties) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type HTTPReadSettings. 
func (h HTTPReadSettings) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) + populateAny(objectMap, "additionalColumns", h.AdditionalColumns) populateAny(objectMap, "additionalHeaders", h.AdditionalHeaders) populateAny(objectMap, "disableMetricsCollection", h.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", h.EnablePartitionDiscovery) populateAny(objectMap, "maxConcurrentConnections", h.MaxConcurrentConnections) - populateAny(objectMap, "partitionRootPath", h.PartitionRootPath) populateAny(objectMap, "requestBody", h.RequestBody) populateAny(objectMap, "requestMethod", h.RequestMethod) populateAny(objectMap, "requestTimeout", h.RequestTimeout) @@ -24073,21 +24541,18 @@ func (h *HTTPReadSettings) UnmarshalJSON(data []byte) error { for key, val := range rawMsg { var err error switch key { + case "additionalColumns": + err = unpopulate(val, "AdditionalColumns", &h.AdditionalColumns) + delete(rawMsg, key) case "additionalHeaders": err = unpopulate(val, "AdditionalHeaders", &h.AdditionalHeaders) delete(rawMsg, key) case "disableMetricsCollection": err = unpopulate(val, "DisableMetricsCollection", &h.DisableMetricsCollection) delete(rawMsg, key) - case "enablePartitionDiscovery": - err = unpopulate(val, "EnablePartitionDiscovery", &h.EnablePartitionDiscovery) - delete(rawMsg, key) case "maxConcurrentConnections": err = unpopulate(val, "MaxConcurrentConnections", &h.MaxConcurrentConnections) delete(rawMsg, key) - case "partitionRootPath": - err = unpopulate(val, "PartitionRootPath", &h.PartitionRootPath) - delete(rawMsg, key) case "requestBody": err = unpopulate(val, "RequestBody", &h.RequestBody) delete(rawMsg, key) @@ -24300,7 +24765,7 @@ func (h *HdfsLinkedService) UnmarshalJSON(data []byte) error { func (h HdfsLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "authenticationType", h.AuthenticationType) - populateAny(objectMap, "encryptedCredential", h.EncryptedCredential) + populate(objectMap, "encryptedCredential", h.EncryptedCredential) populate(objectMap, "password", h.Password) populateAny(objectMap, "url", h.URL) populateAny(objectMap, "userName", h.UserName) @@ -24395,7 +24860,7 @@ func (h HdfsReadSettings) MarshalJSON() ([]byte, error) { populateAny(objectMap, "deleteFilesAfterCompletion", h.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", h.DisableMetricsCollection) populate(objectMap, "distcpSettings", h.DistcpSettings) - populate(objectMap, "enablePartitionDiscovery", h.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", h.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", h.FileListPath) populateAny(objectMap, "maxConcurrentConnections", h.MaxConcurrentConnections) populateAny(objectMap, "modifiedDatetimeEnd", h.ModifiedDatetimeEnd) @@ -24649,7 +25114,7 @@ func (h HiveLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "allowSelfSignedServerCert", h.AllowSelfSignedServerCert) populate(objectMap, "authenticationType", h.AuthenticationType) populateAny(objectMap, "enableSsl", h.EnableSSL) - populateAny(objectMap, "encryptedCredential", h.EncryptedCredential) + populate(objectMap, "encryptedCredential", h.EncryptedCredential) populateAny(objectMap, "httpPath", h.HTTPPath) populateAny(objectMap, "host", h.Host) populate(objectMap, "password", h.Password) @@ -24945,7 +25410,7 @@ func (h HubspotLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populate(objectMap, 
"accessToken", h.AccessToken) populateAny(objectMap, "clientId", h.ClientID) populate(objectMap, "clientSecret", h.ClientSecret) - populateAny(objectMap, "encryptedCredential", h.EncryptedCredential) + populate(objectMap, "encryptedCredential", h.EncryptedCredential) populate(objectMap, "refreshToken", h.RefreshToken) populateAny(objectMap, "useEncryptedEndpoints", h.UseEncryptedEndpoints) populateAny(objectMap, "useHostVerification", h.UseHostVerification) @@ -25144,6 +25609,8 @@ func (i IfConditionActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", i.DependsOn) populate(objectMap, "description", i.Description) populate(objectMap, "name", i.Name) + populate(objectMap, "onInactiveMarkAs", i.OnInactiveMarkAs) + populate(objectMap, "state", i.State) objectMap["type"] = "IfCondition" populate(objectMap, "typeProperties", i.TypeProperties) populate(objectMap, "userProperties", i.UserProperties) @@ -25173,6 +25640,12 @@ func (i *IfConditionActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &i.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &i.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &i.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &i.Type) delete(rawMsg, key) @@ -25339,7 +25812,7 @@ func (i ImpalaLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "allowSelfSignedServerCert", i.AllowSelfSignedServerCert) populate(objectMap, "authenticationType", i.AuthenticationType) populateAny(objectMap, "enableSsl", i.EnableSSL) - populateAny(objectMap, "encryptedCredential", i.EncryptedCredential) + populate(objectMap, "encryptedCredential", i.EncryptedCredential) populateAny(objectMap, "host", i.Host) populate(objectMap, "password", i.Password) populateAny(objectMap, "port", i.Port) @@ -25653,7 +26126,7 @@ func (i InformixLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "authenticationType", i.AuthenticationType) populateAny(objectMap, "connectionString", i.ConnectionString) populate(objectMap, "credential", i.Credential) - populateAny(objectMap, "encryptedCredential", i.EncryptedCredential) + populate(objectMap, "encryptedCredential", i.EncryptedCredential) populate(objectMap, "password", i.Password) populateAny(objectMap, "userName", i.UserName) return json.Marshal(objectMap) @@ -26208,6 +26681,7 @@ func (i IntegrationRuntimeDataFlowProperties) MarshalJSON() ([]byte, error) { populate(objectMap, "cleanup", i.Cleanup) populate(objectMap, "computeType", i.ComputeType) populate(objectMap, "coreCount", i.CoreCount) + populate(objectMap, "customProperties", i.CustomProperties) populate(objectMap, "timeToLive", i.TimeToLive) if i.AdditionalProperties != nil { for key, val := range i.AdditionalProperties { @@ -26235,6 +26709,9 @@ func (i *IntegrationRuntimeDataFlowProperties) UnmarshalJSON(data []byte) error case "coreCount": err = unpopulate(val, "CoreCount", &i.CoreCount) delete(rawMsg, key) + case "customProperties": + err = unpopulate(val, "CustomProperties", &i.CustomProperties) + delete(rawMsg, key) case "timeToLive": err = unpopulate(val, "TimeToLive", &i.TimeToLive) delete(rawMsg, key) @@ -26256,6 +26733,37 @@ func (i *IntegrationRuntimeDataFlowProperties) UnmarshalJSON(data []byte) error return nil } +// MarshalJSON implements the json.Marshaller interface for type IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem. 
+func (i IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "name", i.Name) + populate(objectMap, "value", i.Value) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem. +func (i *IntegrationRuntimeDataFlowPropertiesCustomPropertiesItem) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", i, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "name": + err = unpopulate(val, "Name", &i.Name) + delete(rawMsg, key) + case "value": + err = unpopulate(val, "Value", &i.Value) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", i, err) + } + } + return nil +} + // MarshalJSON implements the json.Marshaller interface for type IntegrationRuntimeDataProxyProperties. func (i IntegrationRuntimeDataProxyProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) @@ -27445,7 +27953,7 @@ func (j *JiraLinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type JiraLinkedServiceTypeProperties. func (j JiraLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", j.EncryptedCredential) + populate(objectMap, "encryptedCredential", j.EncryptedCredential) populateAny(objectMap, "host", j.Host) populate(objectMap, "password", j.Password) populateAny(objectMap, "port", j.Port) @@ -28160,7 +28668,9 @@ func (l LookupActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", l.Description) populate(objectMap, "linkedServiceName", l.LinkedServiceName) populate(objectMap, "name", l.Name) + populate(objectMap, "onInactiveMarkAs", l.OnInactiveMarkAs) populate(objectMap, "policy", l.Policy) + populate(objectMap, "state", l.State) objectMap["type"] = "Lookup" populate(objectMap, "typeProperties", l.TypeProperties) populate(objectMap, "userProperties", l.UserProperties) @@ -28193,9 +28703,15 @@ func (l *LookupActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &l.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &l.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &l.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &l.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &l.Type) delete(rawMsg, key) @@ -28324,7 +28840,7 @@ func (m *MagentoLinkedService) UnmarshalJSON(data []byte) error { func (m MagentoLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "accessToken", m.AccessToken) - populateAny(objectMap, "encryptedCredential", m.EncryptedCredential) + populate(objectMap, "encryptedCredential", m.EncryptedCredential) populateAny(objectMap, "host", m.Host) populateAny(objectMap, "useEncryptedEndpoints", m.UseEncryptedEndpoints) populateAny(objectMap, "useHostVerification", m.UseHostVerification) @@ -29274,6 +29790,445 @@ func (m *ManagedVirtualNetworkResource) UnmarshalJSON(data []byte) error { return nil } +// MarshalJSON implements the json.Marshaller interface for type MapperAttributeMapping. 
+func (m MapperAttributeMapping) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "attributeReference", m.AttributeReference) + populate(objectMap, "attributeReferences", m.AttributeReferences) + populate(objectMap, "expression", m.Expression) + populate(objectMap, "functionName", m.FunctionName) + populate(objectMap, "name", m.Name) + populate(objectMap, "type", m.Type) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperAttributeMapping. +func (m *MapperAttributeMapping) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "attributeReference": + err = unpopulate(val, "AttributeReference", &m.AttributeReference) + delete(rawMsg, key) + case "attributeReferences": + err = unpopulate(val, "AttributeReferences", &m.AttributeReferences) + delete(rawMsg, key) + case "expression": + err = unpopulate(val, "Expression", &m.Expression) + delete(rawMsg, key) + case "functionName": + err = unpopulate(val, "FunctionName", &m.FunctionName) + delete(rawMsg, key) + case "name": + err = unpopulate(val, "Name", &m.Name) + delete(rawMsg, key) + case "type": + err = unpopulate(val, "Type", &m.Type) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperAttributeMappings. +func (m MapperAttributeMappings) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "attributeMappings", m.AttributeMappings) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperAttributeMappings. +func (m *MapperAttributeMappings) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "attributeMappings": + err = unpopulate(val, "AttributeMappings", &m.AttributeMappings) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperAttributeReference. +func (m MapperAttributeReference) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "entity", m.Entity) + populate(objectMap, "entityConnectionReference", m.EntityConnectionReference) + populate(objectMap, "name", m.Name) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperAttributeReference. 
+func (m *MapperAttributeReference) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "entity": + err = unpopulate(val, "Entity", &m.Entity) + delete(rawMsg, key) + case "entityConnectionReference": + err = unpopulate(val, "EntityConnectionReference", &m.EntityConnectionReference) + delete(rawMsg, key) + case "name": + err = unpopulate(val, "Name", &m.Name) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperConnection. +func (m MapperConnection) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "commonDslConnectorProperties", m.CommonDslConnectorProperties) + populate(objectMap, "isInlineDataset", m.IsInlineDataset) + populate(objectMap, "linkedService", m.LinkedService) + populate(objectMap, "linkedServiceType", m.LinkedServiceType) + populate(objectMap, "type", m.Type) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperConnection. +func (m *MapperConnection) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "commonDslConnectorProperties": + err = unpopulate(val, "CommonDslConnectorProperties", &m.CommonDslConnectorProperties) + delete(rawMsg, key) + case "isInlineDataset": + err = unpopulate(val, "IsInlineDataset", &m.IsInlineDataset) + delete(rawMsg, key) + case "linkedService": + err = unpopulate(val, "LinkedService", &m.LinkedService) + delete(rawMsg, key) + case "linkedServiceType": + err = unpopulate(val, "LinkedServiceType", &m.LinkedServiceType) + delete(rawMsg, key) + case "type": + err = unpopulate(val, "Type", &m.Type) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperConnectionReference. +func (m MapperConnectionReference) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "connectionName", m.ConnectionName) + populate(objectMap, "type", m.Type) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperConnectionReference. +func (m *MapperConnectionReference) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "connectionName": + err = unpopulate(val, "ConnectionName", &m.ConnectionName) + delete(rawMsg, key) + case "type": + err = unpopulate(val, "Type", &m.Type) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperDslConnectorProperties. 
+func (m MapperDslConnectorProperties) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "name", m.Name) + populateAny(objectMap, "value", m.Value) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperDslConnectorProperties. +func (m *MapperDslConnectorProperties) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "name": + err = unpopulate(val, "Name", &m.Name) + delete(rawMsg, key) + case "value": + err = unpopulate(val, "Value", &m.Value) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperPolicy. +func (m MapperPolicy) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "mode", m.Mode) + populate(objectMap, "recurrence", m.Recurrence) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperPolicy. +func (m *MapperPolicy) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "mode": + err = unpopulate(val, "Mode", &m.Mode) + delete(rawMsg, key) + case "recurrence": + err = unpopulate(val, "Recurrence", &m.Recurrence) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperPolicyRecurrence. +func (m MapperPolicyRecurrence) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "frequency", m.Frequency) + populate(objectMap, "interval", m.Interval) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperPolicyRecurrence. +func (m *MapperPolicyRecurrence) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "frequency": + err = unpopulate(val, "Frequency", &m.Frequency) + delete(rawMsg, key) + case "interval": + err = unpopulate(val, "Interval", &m.Interval) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperSourceConnectionsInfo. +func (m MapperSourceConnectionsInfo) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "connection", m.Connection) + populate(objectMap, "sourceEntities", m.SourceEntities) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperSourceConnectionsInfo. 
+func (m *MapperSourceConnectionsInfo) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "connection": + err = unpopulate(val, "Connection", &m.Connection) + delete(rawMsg, key) + case "sourceEntities": + err = unpopulate(val, "SourceEntities", &m.SourceEntities) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperTable. +func (m MapperTable) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "name", m.Name) + populate(objectMap, "properties", m.Properties) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperTable. +func (m *MapperTable) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "name": + err = unpopulate(val, "Name", &m.Name) + delete(rawMsg, key) + case "properties": + err = unpopulate(val, "Properties", &m.Properties) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperTableProperties. +func (m MapperTableProperties) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "dslConnectorProperties", m.DslConnectorProperties) + populate(objectMap, "schema", m.Schema) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperTableProperties. +func (m *MapperTableProperties) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "dslConnectorProperties": + err = unpopulate(val, "DslConnectorProperties", &m.DslConnectorProperties) + delete(rawMsg, key) + case "schema": + err = unpopulate(val, "Schema", &m.Schema) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperTableSchema. +func (m MapperTableSchema) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "dataType", m.DataType) + populate(objectMap, "name", m.Name) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperTableSchema. 
+func (m *MapperTableSchema) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "dataType": + err = unpopulate(val, "DataType", &m.DataType) + delete(rawMsg, key) + case "name": + err = unpopulate(val, "Name", &m.Name) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + +// MarshalJSON implements the json.Marshaller interface for type MapperTargetConnectionsInfo. +func (m MapperTargetConnectionsInfo) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "connection", m.Connection) + populate(objectMap, "dataMapperMappings", m.DataMapperMappings) + populate(objectMap, "relationships", m.Relationships) + populate(objectMap, "targetEntities", m.TargetEntities) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type MapperTargetConnectionsInfo. +func (m *MapperTargetConnectionsInfo) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "connection": + err = unpopulate(val, "Connection", &m.Connection) + delete(rawMsg, key) + case "dataMapperMappings": + err = unpopulate(val, "DataMapperMappings", &m.DataMapperMappings) + delete(rawMsg, key) + case "relationships": + err = unpopulate(val, "Relationships", &m.Relationships) + delete(rawMsg, key) + case "targetEntities": + err = unpopulate(val, "TargetEntities", &m.TargetEntities) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", m, err) + } + } + return nil +} + // MarshalJSON implements the json.Marshaller interface for type MappingDataFlow. 
func (m MappingDataFlow) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) @@ -29426,7 +30381,7 @@ func (m *MariaDBLinkedService) UnmarshalJSON(data []byte) error { func (m MariaDBLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", m.ConnectionString) - populateAny(objectMap, "encryptedCredential", m.EncryptedCredential) + populate(objectMap, "encryptedCredential", m.EncryptedCredential) populate(objectMap, "pwd", m.Pwd) return json.Marshal(objectMap) } @@ -29668,7 +30623,7 @@ func (m MarketoLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "clientId", m.ClientID) populate(objectMap, "clientSecret", m.ClientSecret) - populateAny(objectMap, "encryptedCredential", m.EncryptedCredential) + populate(objectMap, "encryptedCredential", m.EncryptedCredential) populateAny(objectMap, "endpoint", m.Endpoint) populateAny(objectMap, "useEncryptedEndpoints", m.UseEncryptedEndpoints) populateAny(objectMap, "useHostVerification", m.UseHostVerification) @@ -29957,7 +30912,7 @@ func (m MicrosoftAccessLinkedServiceTypeProperties) MarshalJSON() ([]byte, error populateAny(objectMap, "authenticationType", m.AuthenticationType) populateAny(objectMap, "connectionString", m.ConnectionString) populate(objectMap, "credential", m.Credential) - populateAny(objectMap, "encryptedCredential", m.EncryptedCredential) + populate(objectMap, "encryptedCredential", m.EncryptedCredential) populate(objectMap, "password", m.Password) populateAny(objectMap, "userName", m.UserName) return json.Marshal(objectMap) @@ -30403,6 +31358,7 @@ func (m MongoDbAtlasLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", m.ConnectionString) populateAny(objectMap, "database", m.Database) + populateAny(objectMap, "driverVersion", m.DriverVersion) return json.Marshal(objectMap) } @@ -30421,6 +31377,9 @@ func (m *MongoDbAtlasLinkedServiceTypeProperties) UnmarshalJSON(data []byte) err case "database": err = unpopulate(val, "Database", &m.Database) delete(rawMsg, key) + case "driverVersion": + err = unpopulate(val, "DriverVersion", &m.DriverVersion) + delete(rawMsg, key) } if err != nil { return fmt.Errorf("unmarshalling type %T: %v", m, err) @@ -30802,7 +31761,7 @@ func (m MongoDbLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populate(objectMap, "authenticationType", m.AuthenticationType) populateAny(objectMap, "databaseName", m.DatabaseName) populateAny(objectMap, "enableSsl", m.EnableSSL) - populateAny(objectMap, "encryptedCredential", m.EncryptedCredential) + populate(objectMap, "encryptedCredential", m.EncryptedCredential) populate(objectMap, "password", m.Password) populateAny(objectMap, "port", m.Port) populateAny(objectMap, "server", m.Server) @@ -31389,7 +32348,7 @@ func (m *MySQLLinkedService) UnmarshalJSON(data []byte) error { func (m MySQLLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", m.ConnectionString) - populateAny(objectMap, "encryptedCredential", m.EncryptedCredential) + populate(objectMap, "encryptedCredential", m.EncryptedCredential) populate(objectMap, "password", m.Password) return json.Marshal(objectMap) } @@ -31657,7 +32616,7 @@ func (n *NetezzaLinkedService) UnmarshalJSON(data []byte) error { func (n NetezzaLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { 
objectMap := make(map[string]any) populateAny(objectMap, "connectionString", n.ConnectionString) - populateAny(objectMap, "encryptedCredential", n.EncryptedCredential) + populate(objectMap, "encryptedCredential", n.EncryptedCredential) populate(objectMap, "pwd", n.Pwd) return json.Marshal(objectMap) } @@ -32011,7 +32970,7 @@ func (o ODataLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "authHeaders", o.AuthHeaders) populate(objectMap, "authenticationType", o.AuthenticationType) populateAny(objectMap, "azureCloudType", o.AzureCloudType) - populateAny(objectMap, "encryptedCredential", o.EncryptedCredential) + populate(objectMap, "encryptedCredential", o.EncryptedCredential) populate(objectMap, "password", o.Password) populate(objectMap, "servicePrincipalEmbeddedCert", o.ServicePrincipalEmbeddedCert) populate(objectMap, "servicePrincipalEmbeddedCertPassword", o.ServicePrincipalEmbeddedCertPassword) @@ -32321,7 +33280,7 @@ func (o OdbcLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "authenticationType", o.AuthenticationType) populateAny(objectMap, "connectionString", o.ConnectionString) populate(objectMap, "credential", o.Credential) - populateAny(objectMap, "encryptedCredential", o.EncryptedCredential) + populate(objectMap, "encryptedCredential", o.EncryptedCredential) populate(objectMap, "password", o.Password) populateAny(objectMap, "userName", o.UserName) return json.Marshal(objectMap) @@ -32773,7 +33732,7 @@ func (o *Office365LinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type Office365LinkedServiceTypeProperties. func (o Office365LinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", o.EncryptedCredential) + populate(objectMap, "encryptedCredential", o.EncryptedCredential) populateAny(objectMap, "office365TenantId", o.Office365TenantID) populateAny(objectMap, "servicePrincipalId", o.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", o.ServicePrincipalKey) @@ -33292,7 +34251,7 @@ func (o *OracleCloudStorageLinkedService) UnmarshalJSON(data []byte) error { func (o OracleCloudStorageLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "accessKeyId", o.AccessKeyID) - populateAny(objectMap, "encryptedCredential", o.EncryptedCredential) + populate(objectMap, "encryptedCredential", o.EncryptedCredential) populate(objectMap, "secretAccessKey", o.SecretAccessKey) populateAny(objectMap, "serviceUrl", o.ServiceURL) return json.Marshal(objectMap) @@ -33390,7 +34349,7 @@ func (o OracleCloudStorageReadSettings) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "deleteFilesAfterCompletion", o.DeleteFilesAfterCompletion) populateAny(objectMap, "disableMetricsCollection", o.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", o.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", o.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", o.FileListPath) populateAny(objectMap, "maxConcurrentConnections", o.MaxConcurrentConnections) populateAny(objectMap, "modifiedDatetimeEnd", o.ModifiedDatetimeEnd) @@ -33541,7 +34500,7 @@ func (o *OracleLinkedService) UnmarshalJSON(data []byte) error { func (o OracleLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, 
"connectionString", o.ConnectionString) - populateAny(objectMap, "encryptedCredential", o.EncryptedCredential) + populate(objectMap, "encryptedCredential", o.EncryptedCredential) populate(objectMap, "password", o.Password) return json.Marshal(objectMap) } @@ -33676,7 +34635,7 @@ func (o *OracleServiceCloudLinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type OracleServiceCloudLinkedServiceTypeProperties. func (o OracleServiceCloudLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", o.EncryptedCredential) + populate(objectMap, "encryptedCredential", o.EncryptedCredential) populateAny(objectMap, "host", o.Host) populate(objectMap, "password", o.Password) populateAny(objectMap, "useEncryptedEndpoints", o.UseEncryptedEndpoints) @@ -34687,6 +35646,52 @@ func (p *ParquetFormat) UnmarshalJSON(data []byte) error { return nil } +// MarshalJSON implements the json.Marshaller interface for type ParquetReadSettings. +func (p ParquetReadSettings) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "compressionProperties", p.CompressionProperties) + objectMap["type"] = "ParquetReadSettings" + if p.AdditionalProperties != nil { + for key, val := range p.AdditionalProperties { + objectMap[key] = val + } + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type ParquetReadSettings. +func (p *ParquetReadSettings) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", p, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "compressionProperties": + p.CompressionProperties, err = unmarshalCompressionReadSettingsClassification(val) + delete(rawMsg, key) + case "type": + err = unpopulate(val, "Type", &p.Type) + delete(rawMsg, key) + default: + if p.AdditionalProperties == nil { + p.AdditionalProperties = map[string]any{} + } + if val != nil { + var aux any + err = json.Unmarshal(val, &aux) + p.AdditionalProperties[key] = aux + } + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", p, err) + } + } + return nil +} + // MarshalJSON implements the json.Marshaller interface for type ParquetSink. 
func (p ParquetSink) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) @@ -34766,6 +35771,7 @@ func (p ParquetSource) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "additionalColumns", p.AdditionalColumns) populateAny(objectMap, "disableMetricsCollection", p.DisableMetricsCollection) + populate(objectMap, "formatSettings", p.FormatSettings) populateAny(objectMap, "maxConcurrentConnections", p.MaxConcurrentConnections) populateAny(objectMap, "sourceRetryCount", p.SourceRetryCount) populateAny(objectMap, "sourceRetryWait", p.SourceRetryWait) @@ -34794,6 +35800,9 @@ func (p *ParquetSource) UnmarshalJSON(data []byte) error { case "disableMetricsCollection": err = unpopulate(val, "DisableMetricsCollection", &p.DisableMetricsCollection) delete(rawMsg, key) + case "formatSettings": + err = unpopulate(val, "FormatSettings", &p.FormatSettings) + delete(rawMsg, key) case "maxConcurrentConnections": err = unpopulate(val, "MaxConcurrentConnections", &p.MaxConcurrentConnections) delete(rawMsg, key) @@ -34944,7 +35953,7 @@ func (p PaypalLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "clientId", p.ClientID) populate(objectMap, "clientSecret", p.ClientSecret) - populateAny(objectMap, "encryptedCredential", p.EncryptedCredential) + populate(objectMap, "encryptedCredential", p.EncryptedCredential) populateAny(objectMap, "host", p.Host) populateAny(objectMap, "useEncryptedEndpoints", p.UseEncryptedEndpoints) populateAny(objectMap, "useHostVerification", p.UseHostVerification) @@ -35238,7 +36247,7 @@ func (p PhoenixLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "allowSelfSignedServerCert", p.AllowSelfSignedServerCert) populate(objectMap, "authenticationType", p.AuthenticationType) populateAny(objectMap, "enableSsl", p.EnableSSL) - populateAny(objectMap, "encryptedCredential", p.EncryptedCredential) + populate(objectMap, "encryptedCredential", p.EncryptedCredential) populateAny(objectMap, "httpPath", p.HTTPPath) populateAny(objectMap, "host", p.Host) populate(objectMap, "password", p.Password) @@ -35535,6 +36544,8 @@ func (p *PipelineElapsedTimeMetricPolicy) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type PipelineExternalComputeScaleProperties. 
func (p PipelineExternalComputeScaleProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) + populate(objectMap, "numberOfExternalNodes", p.NumberOfExternalNodes) + populate(objectMap, "numberOfPipelineNodes", p.NumberOfPipelineNodes) populate(objectMap, "timeToLive", p.TimeToLive) if p.AdditionalProperties != nil { for key, val := range p.AdditionalProperties { @@ -35553,6 +36564,12 @@ func (p *PipelineExternalComputeScaleProperties) UnmarshalJSON(data []byte) erro for key, val := range rawMsg { var err error switch key { + case "numberOfExternalNodes": + err = unpopulate(val, "NumberOfExternalNodes", &p.NumberOfExternalNodes) + delete(rawMsg, key) + case "numberOfPipelineNodes": + err = unpopulate(val, "NumberOfPipelineNodes", &p.NumberOfPipelineNodes) + delete(rawMsg, key) case "timeToLive": err = unpopulate(val, "TimeToLive", &p.TimeToLive) delete(rawMsg, key) @@ -36036,7 +37053,7 @@ func (p *PostgreSQLLinkedService) UnmarshalJSON(data []byte) error { func (p PostgreSQLLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", p.ConnectionString) - populateAny(objectMap, "encryptedCredential", p.EncryptedCredential) + populate(objectMap, "encryptedCredential", p.EncryptedCredential) populate(objectMap, "password", p.Password) return json.Marshal(objectMap) } @@ -36523,7 +37540,7 @@ func (p PrestoLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populate(objectMap, "authenticationType", p.AuthenticationType) populateAny(objectMap, "catalog", p.Catalog) populateAny(objectMap, "enableSsl", p.EnableSSL) - populateAny(objectMap, "encryptedCredential", p.EncryptedCredential) + populate(objectMap, "encryptedCredential", p.EncryptedCredential) populateAny(objectMap, "host", p.Host) populate(objectMap, "password", p.Password) populateAny(objectMap, "port", p.Port) @@ -37182,7 +38199,7 @@ func (q QuickBooksLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "connectionProperties", q.ConnectionProperties) populateAny(objectMap, "consumerKey", q.ConsumerKey) populate(objectMap, "consumerSecret", q.ConsumerSecret) - populateAny(objectMap, "encryptedCredential", q.EncryptedCredential) + populate(objectMap, "encryptedCredential", q.EncryptedCredential) populateAny(objectMap, "endpoint", q.Endpoint) populateAny(objectMap, "useEncryptedEndpoints", q.UseEncryptedEndpoints) return json.Marshal(objectMap) @@ -37441,7 +38458,7 @@ func (q *QuickbaseLinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type QuickbaseLinkedServiceTypeProperties. 
func (q QuickbaseLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", q.EncryptedCredential) + populate(objectMap, "encryptedCredential", q.EncryptedCredential) populateAny(objectMap, "url", q.URL) populate(objectMap, "userToken", q.UserToken) return json.Marshal(objectMap) @@ -38020,7 +39037,7 @@ func (r ResponsysLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "clientId", r.ClientID) populate(objectMap, "clientSecret", r.ClientSecret) - populateAny(objectMap, "encryptedCredential", r.EncryptedCredential) + populate(objectMap, "encryptedCredential", r.EncryptedCredential) populateAny(objectMap, "endpoint", r.Endpoint) populateAny(objectMap, "useEncryptedEndpoints", r.UseEncryptedEndpoints) populateAny(objectMap, "useHostVerification", r.UseHostVerification) @@ -38287,8 +39304,8 @@ func (r *RestResourceDataset) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type RestResourceDatasetTypeProperties. func (r RestResourceDatasetTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "additionalHeaders", r.AdditionalHeaders) - populateAny(objectMap, "paginationRules", r.PaginationRules) + populate(objectMap, "additionalHeaders", r.AdditionalHeaders) + populate(objectMap, "paginationRules", r.PaginationRules) populateAny(objectMap, "relativeUrl", r.RelativeURL) populateAny(objectMap, "requestBody", r.RequestBody) populateAny(objectMap, "requestMethod", r.RequestMethod) @@ -38400,7 +39417,7 @@ func (r RestServiceLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populate(objectMap, "clientSecret", r.ClientSecret) populate(objectMap, "credential", r.Credential) populateAny(objectMap, "enableServerCertificateValidation", r.EnableServerCertificateValidation) - populateAny(objectMap, "encryptedCredential", r.EncryptedCredential) + populate(objectMap, "encryptedCredential", r.EncryptedCredential) populate(objectMap, "password", r.Password) populateAny(objectMap, "resource", r.Resource) populateAny(objectMap, "scope", r.Scope) @@ -38942,6 +39959,7 @@ func (s SQLDWSource) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "additionalColumns", s.AdditionalColumns) populateAny(objectMap, "disableMetricsCollection", s.DisableMetricsCollection) + populateAny(objectMap, "isolationLevel", s.IsolationLevel) populateAny(objectMap, "maxConcurrentConnections", s.MaxConcurrentConnections) populateAny(objectMap, "partitionOption", s.PartitionOption) populate(objectMap, "partitionSettings", s.PartitionSettings) @@ -38975,6 +39993,9 @@ func (s *SQLDWSource) UnmarshalJSON(data []byte) error { case "disableMetricsCollection": err = unpopulate(val, "DisableMetricsCollection", &s.DisableMetricsCollection) delete(rawMsg, key) + case "isolationLevel": + err = unpopulate(val, "IsolationLevel", &s.IsolationLevel) + delete(rawMsg, key) case "maxConcurrentConnections": err = unpopulate(val, "MaxConcurrentConnections", &s.MaxConcurrentConnections) delete(rawMsg, key) @@ -39161,6 +40182,7 @@ func (s SQLMISource) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "additionalColumns", s.AdditionalColumns) populateAny(objectMap, "disableMetricsCollection", s.DisableMetricsCollection) + populateAny(objectMap, "isolationLevel", s.IsolationLevel) populateAny(objectMap, "maxConcurrentConnections", 
s.MaxConcurrentConnections) populateAny(objectMap, "partitionOption", s.PartitionOption) populate(objectMap, "partitionSettings", s.PartitionSettings) @@ -39195,6 +40217,9 @@ func (s *SQLMISource) UnmarshalJSON(data []byte) error { case "disableMetricsCollection": err = unpopulate(val, "DisableMetricsCollection", &s.DisableMetricsCollection) delete(rawMsg, key) + case "isolationLevel": + err = unpopulate(val, "IsolationLevel", &s.IsolationLevel) + delete(rawMsg, key) case "maxConcurrentConnections": err = unpopulate(val, "MaxConcurrentConnections", &s.MaxConcurrentConnections) delete(rawMsg, key) @@ -39348,7 +40373,7 @@ func (s SQLServerLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "alwaysEncryptedSettings", s.AlwaysEncryptedSettings) populateAny(objectMap, "connectionString", s.ConnectionString) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populate(objectMap, "password", s.Password) populateAny(objectMap, "userName", s.UserName) return json.Marshal(objectMap) @@ -39493,6 +40518,7 @@ func (s SQLServerSource) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "additionalColumns", s.AdditionalColumns) populateAny(objectMap, "disableMetricsCollection", s.DisableMetricsCollection) + populateAny(objectMap, "isolationLevel", s.IsolationLevel) populateAny(objectMap, "maxConcurrentConnections", s.MaxConcurrentConnections) populateAny(objectMap, "partitionOption", s.PartitionOption) populate(objectMap, "partitionSettings", s.PartitionSettings) @@ -39527,6 +40553,9 @@ func (s *SQLServerSource) UnmarshalJSON(data []byte) error { case "disableMetricsCollection": err = unpopulate(val, "DisableMetricsCollection", &s.DisableMetricsCollection) delete(rawMsg, key) + case "isolationLevel": + err = unpopulate(val, "IsolationLevel", &s.IsolationLevel) + delete(rawMsg, key) case "maxConcurrentConnections": err = unpopulate(val, "MaxConcurrentConnections", &s.MaxConcurrentConnections) delete(rawMsg, key) @@ -39585,7 +40614,9 @@ func (s SQLServerStoredProcedureActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", s.Description) populate(objectMap, "linkedServiceName", s.LinkedServiceName) populate(objectMap, "name", s.Name) + populate(objectMap, "onInactiveMarkAs", s.OnInactiveMarkAs) populate(objectMap, "policy", s.Policy) + populate(objectMap, "state", s.State) objectMap["type"] = "SqlServerStoredProcedure" populate(objectMap, "typeProperties", s.TypeProperties) populate(objectMap, "userProperties", s.UserProperties) @@ -39618,9 +40649,15 @@ func (s *SQLServerStoredProcedureActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &s.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &s.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &s.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &s.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &s.Type) delete(rawMsg, key) @@ -40404,7 +41441,7 @@ func (s *SalesforceLinkedService) UnmarshalJSON(data []byte) error { func (s SalesforceLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "apiVersion", s.APIVersion) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, 
"encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "environmentUrl", s.EnvironmentURL) populate(objectMap, "password", s.Password) populate(objectMap, "securityToken", s.SecurityToken) @@ -40515,7 +41552,7 @@ func (s SalesforceMarketingCloudLinkedServiceTypeProperties) MarshalJSON() ([]by populateAny(objectMap, "clientId", s.ClientID) populate(objectMap, "clientSecret", s.ClientSecret) populateAny(objectMap, "connectionProperties", s.ConnectionProperties) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "useEncryptedEndpoints", s.UseEncryptedEndpoints) populateAny(objectMap, "useHostVerification", s.UseHostVerification) populateAny(objectMap, "usePeerVerification", s.UsePeerVerification) @@ -40871,7 +41908,7 @@ func (s *SalesforceServiceCloudLinkedService) UnmarshalJSON(data []byte) error { func (s SalesforceServiceCloudLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "apiVersion", s.APIVersion) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "environmentUrl", s.EnvironmentURL) populateAny(objectMap, "extendedProperties", s.ExtendedProperties) populate(objectMap, "password", s.Password) @@ -41104,7 +42141,7 @@ func (s SalesforceServiceCloudSource) MarshalJSON() ([]byte, error) { populateAny(objectMap, "disableMetricsCollection", s.DisableMetricsCollection) populateAny(objectMap, "maxConcurrentConnections", s.MaxConcurrentConnections) populateAny(objectMap, "query", s.Query) - populate(objectMap, "readBehavior", s.ReadBehavior) + populateAny(objectMap, "readBehavior", s.ReadBehavior) populateAny(objectMap, "sourceRetryCount", s.SourceRetryCount) populateAny(objectMap, "sourceRetryWait", s.SourceRetryWait) objectMap["type"] = "SalesforceServiceCloudSource" @@ -41253,7 +42290,7 @@ func (s SalesforceSource) MarshalJSON() ([]byte, error) { populateAny(objectMap, "maxConcurrentConnections", s.MaxConcurrentConnections) populateAny(objectMap, "query", s.Query) populateAny(objectMap, "queryTimeout", s.QueryTimeout) - populate(objectMap, "readBehavior", s.ReadBehavior) + populateAny(objectMap, "readBehavior", s.ReadBehavior) populateAny(objectMap, "sourceRetryCount", s.SourceRetryCount) populateAny(objectMap, "sourceRetryWait", s.SourceRetryWait) objectMap["type"] = "SalesforceSource" @@ -41385,7 +42422,7 @@ func (s *SapBWLinkedService) UnmarshalJSON(data []byte) error { func (s SapBWLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "clientId", s.ClientID) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populate(objectMap, "password", s.Password) populateAny(objectMap, "server", s.Server) populateAny(objectMap, "systemNumber", s.SystemNumber) @@ -41633,7 +42670,7 @@ func (s *SapCloudForCustomerLinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type SapCloudForCustomerLinkedServiceTypeProperties. 
func (s SapCloudForCustomerLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populate(objectMap, "password", s.Password) populateAny(objectMap, "url", s.URL) populateAny(objectMap, "username", s.Username) @@ -41985,8 +43022,8 @@ func (s SapEccLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "encryptedCredential", s.EncryptedCredential) populate(objectMap, "password", s.Password) - populate(objectMap, "url", s.URL) - populate(objectMap, "username", s.Username) + populateAny(objectMap, "url", s.URL) + populateAny(objectMap, "username", s.Username) return json.Marshal(objectMap) } @@ -42261,7 +43298,7 @@ func (s SapHanaLinkedServiceProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "authenticationType", s.AuthenticationType) populateAny(objectMap, "connectionString", s.ConnectionString) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populate(objectMap, "password", s.Password) populateAny(objectMap, "server", s.Server) populateAny(objectMap, "userName", s.UserName) @@ -42583,7 +43620,7 @@ func (s *SapOdpLinkedService) UnmarshalJSON(data []byte) error { func (s SapOdpLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "clientId", s.ClientID) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "language", s.Language) populateAny(objectMap, "logonGroup", s.LogonGroup) populateAny(objectMap, "messageServer", s.MessageServer) @@ -42927,7 +43964,7 @@ func (s *SapOpenHubLinkedService) UnmarshalJSON(data []byte) error { func (s SapOpenHubLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "clientId", s.ClientID) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "language", s.Language) populateAny(objectMap, "logonGroup", s.LogonGroup) populateAny(objectMap, "messageServer", s.MessageServer) @@ -43247,7 +44284,7 @@ func (s *SapTableLinkedService) UnmarshalJSON(data []byte) error { func (s SapTableLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "clientId", s.ClientID) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "language", s.Language) populateAny(objectMap, "logonGroup", s.LogonGroup) populateAny(objectMap, "messageServer", s.MessageServer) @@ -43769,7 +44806,9 @@ func (s ScriptActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", s.Description) populate(objectMap, "linkedServiceName", s.LinkedServiceName) populate(objectMap, "name", s.Name) + populate(objectMap, "onInactiveMarkAs", s.OnInactiveMarkAs) populate(objectMap, "policy", s.Policy) + populate(objectMap, "state", s.State) objectMap["type"] = "Script" populate(objectMap, "typeProperties", s.TypeProperties) populate(objectMap, "userProperties", s.UserProperties) @@ -43802,9 +44841,15 @@ func (s *ScriptActivity) 
UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &s.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &s.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &s.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &s.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &s.Type) delete(rawMsg, key) @@ -44003,6 +45048,37 @@ func (s *SecretBase) UnmarshalJSON(data []byte) error { return nil } +// MarshalJSON implements the json.Marshaller interface for type SecureInputOutputPolicy. +func (s SecureInputOutputPolicy) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]any) + populate(objectMap, "secureInput", s.SecureInput) + populate(objectMap, "secureOutput", s.SecureOutput) + return json.Marshal(objectMap) +} + +// UnmarshalJSON implements the json.Unmarshaller interface for type SecureInputOutputPolicy. +func (s *SecureInputOutputPolicy) UnmarshalJSON(data []byte) error { + var rawMsg map[string]json.RawMessage + if err := json.Unmarshal(data, &rawMsg); err != nil { + return fmt.Errorf("unmarshalling type %T: %v", s, err) + } + for key, val := range rawMsg { + var err error + switch key { + case "secureInput": + err = unpopulate(val, "SecureInput", &s.SecureInput) + delete(rawMsg, key) + case "secureOutput": + err = unpopulate(val, "SecureOutput", &s.SecureOutput) + delete(rawMsg, key) + } + if err != nil { + return fmt.Errorf("unmarshalling type %T: %v", s, err) + } + } + return nil +} + // MarshalJSON implements the json.Marshaller interface for type SecureString. func (s SecureString) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) @@ -44297,6 +45373,7 @@ func (s SelfHostedIntegrationRuntimeStatusTypeProperties) MarshalJSON() ([]byte, populate(objectMap, "nodes", s.Nodes) populate(objectMap, "pushedVersion", s.PushedVersion) populateTimeRFC3339(objectMap, "scheduledUpdateDate", s.ScheduledUpdateDate) + populate(objectMap, "selfContainedInteractiveAuthoringEnabled", s.SelfContainedInteractiveAuthoringEnabled) populate(objectMap, "serviceUrls", s.ServiceUrls) populate(objectMap, "taskQueueId", s.TaskQueueID) populate(objectMap, "updateDelayOffset", s.UpdateDelayOffset) @@ -44347,6 +45424,9 @@ func (s *SelfHostedIntegrationRuntimeStatusTypeProperties) UnmarshalJSON(data [] case "scheduledUpdateDate": err = unpopulateTimeRFC3339(val, "ScheduledUpdateDate", &s.ScheduledUpdateDate) delete(rawMsg, key) + case "selfContainedInteractiveAuthoringEnabled": + err = unpopulate(val, "SelfContainedInteractiveAuthoringEnabled", &s.SelfContainedInteractiveAuthoringEnabled) + delete(rawMsg, key) case "serviceUrls": err = unpopulate(val, "ServiceUrls", &s.ServiceUrls) delete(rawMsg, key) @@ -44374,6 +45454,7 @@ func (s *SelfHostedIntegrationRuntimeStatusTypeProperties) UnmarshalJSON(data [] func (s SelfHostedIntegrationRuntimeTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "linkedInfo", s.LinkedInfo) + populate(objectMap, "selfContainedInteractiveAuthoringEnabled", s.SelfContainedInteractiveAuthoringEnabled) return json.Marshal(objectMap) } @@ -44389,6 +45470,9 @@ func (s *SelfHostedIntegrationRuntimeTypeProperties) UnmarshalJSON(data []byte) case "linkedInfo": s.LinkedInfo, err = unmarshalLinkedIntegrationRuntimeTypeClassification(val) delete(rawMsg, key) + case "selfContainedInteractiveAuthoringEnabled": + err = unpopulate(val, "SelfContainedInteractiveAuthoringEnabled", 
&s.SelfContainedInteractiveAuthoringEnabled) + delete(rawMsg, key) } if err != nil { return fmt.Errorf("unmarshalling type %T: %v", s, err) @@ -44465,7 +45549,7 @@ func (s ServiceNowLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populate(objectMap, "authenticationType", s.AuthenticationType) populateAny(objectMap, "clientId", s.ClientID) populate(objectMap, "clientSecret", s.ClientSecret) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "endpoint", s.Endpoint) populate(objectMap, "password", s.Password) populateAny(objectMap, "useEncryptedEndpoints", s.UseEncryptedEndpoints) @@ -44761,6 +45845,9 @@ func (s SetVariableActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", s.DependsOn) populate(objectMap, "description", s.Description) populate(objectMap, "name", s.Name) + populate(objectMap, "onInactiveMarkAs", s.OnInactiveMarkAs) + populate(objectMap, "policy", s.Policy) + populate(objectMap, "state", s.State) objectMap["type"] = "SetVariable" populate(objectMap, "typeProperties", s.TypeProperties) populate(objectMap, "userProperties", s.UserProperties) @@ -44790,6 +45877,15 @@ func (s *SetVariableActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &s.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &s.OnInactiveMarkAs) + delete(rawMsg, key) + case "policy": + err = unpopulate(val, "Policy", &s.Policy) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &s.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &s.Type) delete(rawMsg, key) @@ -44820,6 +45916,7 @@ func (s *SetVariableActivity) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type SetVariableActivityTypeProperties. 
func (s SetVariableActivityTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) + populate(objectMap, "setSystemVariable", s.SetSystemVariable) populateAny(objectMap, "value", s.Value) populate(objectMap, "variableName", s.VariableName) return json.Marshal(objectMap) @@ -44834,6 +45931,9 @@ func (s *SetVariableActivityTypeProperties) UnmarshalJSON(data []byte) error { for key, val := range rawMsg { var err error switch key { + case "setSystemVariable": + err = unpopulate(val, "SetSystemVariable", &s.SetSystemVariable) + delete(rawMsg, key) case "value": err = unpopulate(val, "Value", &s.Value) delete(rawMsg, key) @@ -44904,7 +46004,7 @@ func (s SftpReadSettings) MarshalJSON() ([]byte, error) { populateAny(objectMap, "deleteFilesAfterCompletion", s.DeleteFilesAfterCompletion) populateAny(objectMap, "disableChunking", s.DisableChunking) populateAny(objectMap, "disableMetricsCollection", s.DisableMetricsCollection) - populate(objectMap, "enablePartitionDiscovery", s.EnablePartitionDiscovery) + populateAny(objectMap, "enablePartitionDiscovery", s.EnablePartitionDiscovery) populateAny(objectMap, "fileListPath", s.FileListPath) populateAny(objectMap, "maxConcurrentConnections", s.MaxConcurrentConnections) populateAny(objectMap, "modifiedDatetimeEnd", s.ModifiedDatetimeEnd) @@ -45054,7 +46154,7 @@ func (s *SftpServerLinkedService) UnmarshalJSON(data []byte) error { func (s SftpServerLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "authenticationType", s.AuthenticationType) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "host", s.Host) populateAny(objectMap, "hostKeyFingerprint", s.HostKeyFingerprint) populate(objectMap, "passPhrase", s.PassPhrase) @@ -45271,7 +46371,7 @@ func (s *SharePointOnlineListLinkedService) UnmarshalJSON(data []byte) error { // MarshalJSON implements the json.Marshaller interface for type SharePointOnlineListLinkedServiceTypeProperties. 
func (s SharePointOnlineListLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "servicePrincipalId", s.ServicePrincipalID) populate(objectMap, "servicePrincipalKey", s.ServicePrincipalKey) populateAny(objectMap, "siteUrl", s.SiteURL) @@ -45517,7 +46617,7 @@ func (s *ShopifyLinkedService) UnmarshalJSON(data []byte) error { func (s ShopifyLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "accessToken", s.AccessToken) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "host", s.Host) populateAny(objectMap, "useEncryptedEndpoints", s.UseEncryptedEndpoints) populateAny(objectMap, "useHostVerification", s.UseHostVerification) @@ -45801,7 +46901,7 @@ func (s *SmartsheetLinkedService) UnmarshalJSON(data []byte) error { func (s SmartsheetLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "apiToken", s.APIToken) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) return json.Marshal(objectMap) } @@ -46099,7 +47199,7 @@ func (s *SnowflakeLinkedService) UnmarshalJSON(data []byte) error { func (s SnowflakeLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", s.ConnectionString) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populate(objectMap, "password", s.Password) return json.Marshal(objectMap) } @@ -46405,7 +47505,7 @@ func (s SparkLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "allowSelfSignedServerCert", s.AllowSelfSignedServerCert) populate(objectMap, "authenticationType", s.AuthenticationType) populateAny(objectMap, "enableSsl", s.EnableSSL) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "httpPath", s.HTTPPath) populateAny(objectMap, "host", s.Host) populate(objectMap, "password", s.Password) @@ -46689,7 +47789,7 @@ func (s SquareLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { populateAny(objectMap, "clientId", s.ClientID) populate(objectMap, "clientSecret", s.ClientSecret) populateAny(objectMap, "connectionProperties", s.ConnectionProperties) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populateAny(objectMap, "host", s.Host) populateAny(objectMap, "redirectUri", s.RedirectURI) populateAny(objectMap, "useEncryptedEndpoints", s.UseEncryptedEndpoints) @@ -47512,6 +48612,8 @@ func (s SwitchActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", s.DependsOn) populate(objectMap, "description", s.Description) populate(objectMap, "name", s.Name) + populate(objectMap, "onInactiveMarkAs", s.OnInactiveMarkAs) + populate(objectMap, "state", s.State) objectMap["type"] = "Switch" populate(objectMap, "typeProperties", s.TypeProperties) populate(objectMap, "userProperties", s.UserProperties) @@ -47541,6 +48643,12 @@ func (s *SwitchActivity) 
UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &s.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &s.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &s.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &s.Type) delete(rawMsg, key) @@ -47701,7 +48809,7 @@ func (s SybaseLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "authenticationType", s.AuthenticationType) populateAny(objectMap, "database", s.Database) - populateAny(objectMap, "encryptedCredential", s.EncryptedCredential) + populate(objectMap, "encryptedCredential", s.EncryptedCredential) populate(objectMap, "password", s.Password) populateAny(objectMap, "schema", s.Schema) populateAny(objectMap, "server", s.Server) @@ -47925,7 +49033,9 @@ func (s SynapseNotebookActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", s.Description) populate(objectMap, "linkedServiceName", s.LinkedServiceName) populate(objectMap, "name", s.Name) + populate(objectMap, "onInactiveMarkAs", s.OnInactiveMarkAs) populate(objectMap, "policy", s.Policy) + populate(objectMap, "state", s.State) objectMap["type"] = "SynapseNotebook" populate(objectMap, "typeProperties", s.TypeProperties) populate(objectMap, "userProperties", s.UserProperties) @@ -47958,9 +49068,15 @@ func (s *SynapseNotebookActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &s.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &s.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &s.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &s.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &s.Type) delete(rawMsg, key) @@ -47992,12 +49108,15 @@ func (s *SynapseNotebookActivity) UnmarshalJSON(data []byte) error { func (s SynapseNotebookActivityTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "conf", s.Conf) + populate(objectMap, "configurationType", s.ConfigurationType) populateAny(objectMap, "driverSize", s.DriverSize) populateAny(objectMap, "executorSize", s.ExecutorSize) populate(objectMap, "notebook", s.Notebook) - populate(objectMap, "numExecutors", s.NumExecutors) + populateAny(objectMap, "numExecutors", s.NumExecutors) populate(objectMap, "parameters", s.Parameters) + populate(objectMap, "sparkConfig", s.SparkConfig) populate(objectMap, "sparkPool", s.SparkPool) + populate(objectMap, "targetSparkConfiguration", s.TargetSparkConfiguration) return json.Marshal(objectMap) } @@ -48013,6 +49132,9 @@ func (s *SynapseNotebookActivityTypeProperties) UnmarshalJSON(data []byte) error case "conf": err = unpopulate(val, "Conf", &s.Conf) delete(rawMsg, key) + case "configurationType": + err = unpopulate(val, "ConfigurationType", &s.ConfigurationType) + delete(rawMsg, key) case "driverSize": err = unpopulate(val, "DriverSize", &s.DriverSize) delete(rawMsg, key) @@ -48028,9 +49150,15 @@ func (s *SynapseNotebookActivityTypeProperties) UnmarshalJSON(data []byte) error case "parameters": err = unpopulate(val, "Parameters", &s.Parameters) delete(rawMsg, key) + case "sparkConfig": + err = unpopulate(val, "SparkConfig", &s.SparkConfig) + delete(rawMsg, key) case "sparkPool": err = unpopulate(val, "SparkPool", &s.SparkPool) delete(rawMsg, key) + case 
"targetSparkConfiguration": + err = unpopulate(val, "TargetSparkConfiguration", &s.TargetSparkConfiguration) + delete(rawMsg, key) } if err != nil { return fmt.Errorf("unmarshalling type %T: %v", s, err) @@ -48164,7 +49292,9 @@ func (s SynapseSparkJobDefinitionActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", s.Description) populate(objectMap, "linkedServiceName", s.LinkedServiceName) populate(objectMap, "name", s.Name) + populate(objectMap, "onInactiveMarkAs", s.OnInactiveMarkAs) populate(objectMap, "policy", s.Policy) + populate(objectMap, "state", s.State) objectMap["type"] = "SparkJob" populate(objectMap, "typeProperties", s.TypeProperties) populate(objectMap, "userProperties", s.UserProperties) @@ -48197,9 +49327,15 @@ func (s *SynapseSparkJobDefinitionActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &s.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &s.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &s.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &s.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &s.Type) delete(rawMsg, key) @@ -48483,7 +49619,7 @@ func (t TeamDeskLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "apiToken", t.APIToken) populate(objectMap, "authenticationType", t.AuthenticationType) - populateAny(objectMap, "encryptedCredential", t.EncryptedCredential) + populate(objectMap, "encryptedCredential", t.EncryptedCredential) populate(objectMap, "password", t.Password) populateAny(objectMap, "url", t.URL) populateAny(objectMap, "userName", t.UserName) @@ -48592,7 +49728,7 @@ func (t TeradataLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "authenticationType", t.AuthenticationType) populateAny(objectMap, "connectionString", t.ConnectionString) - populateAny(objectMap, "encryptedCredential", t.EncryptedCredential) + populate(objectMap, "encryptedCredential", t.EncryptedCredential) populate(objectMap, "password", t.Password) populateAny(objectMap, "server", t.Server) populateAny(objectMap, "username", t.Username) @@ -49659,6 +50795,8 @@ func (u UntilActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", u.DependsOn) populate(objectMap, "description", u.Description) populate(objectMap, "name", u.Name) + populate(objectMap, "onInactiveMarkAs", u.OnInactiveMarkAs) + populate(objectMap, "state", u.State) objectMap["type"] = "Until" populate(objectMap, "typeProperties", u.TypeProperties) populate(objectMap, "userProperties", u.UserProperties) @@ -49688,6 +50826,12 @@ func (u *UntilActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &u.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &u.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &u.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &u.Type) delete(rawMsg, key) @@ -49888,6 +51032,8 @@ func (v ValidationActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", v.DependsOn) populate(objectMap, "description", v.Description) populate(objectMap, "name", v.Name) + populate(objectMap, "onInactiveMarkAs", v.OnInactiveMarkAs) + populate(objectMap, "state", v.State) objectMap["type"] = "Validation" populate(objectMap, 
"typeProperties", v.TypeProperties) populate(objectMap, "userProperties", v.UserProperties) @@ -49917,6 +51063,12 @@ func (v *ValidationActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &v.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &v.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &v.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &v.Type) delete(rawMsg, key) @@ -50119,7 +51271,7 @@ func (v *VerticaLinkedService) UnmarshalJSON(data []byte) error { func (v VerticaLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionString", v.ConnectionString) - populateAny(objectMap, "encryptedCredential", v.EncryptedCredential) + populate(objectMap, "encryptedCredential", v.EncryptedCredential) populate(objectMap, "pwd", v.Pwd) return json.Marshal(objectMap) } @@ -50300,6 +51452,8 @@ func (w WaitActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", w.DependsOn) populate(objectMap, "description", w.Description) populate(objectMap, "name", w.Name) + populate(objectMap, "onInactiveMarkAs", w.OnInactiveMarkAs) + populate(objectMap, "state", w.State) objectMap["type"] = "Wait" populate(objectMap, "typeProperties", w.TypeProperties) populate(objectMap, "userProperties", w.UserProperties) @@ -50329,6 +51483,12 @@ func (w *WaitActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &w.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &w.OnInactiveMarkAs) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &w.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &w.Type) delete(rawMsg, key) @@ -50390,7 +51550,9 @@ func (w WebActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "description", w.Description) populate(objectMap, "linkedServiceName", w.LinkedServiceName) populate(objectMap, "name", w.Name) + populate(objectMap, "onInactiveMarkAs", w.OnInactiveMarkAs) populate(objectMap, "policy", w.Policy) + populate(objectMap, "state", w.State) objectMap["type"] = "WebActivity" populate(objectMap, "typeProperties", w.TypeProperties) populate(objectMap, "userProperties", w.UserProperties) @@ -50423,9 +51585,15 @@ func (w *WebActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &w.Name) delete(rawMsg, key) + case "onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &w.OnInactiveMarkAs) + delete(rawMsg, key) case "policy": err = unpopulate(val, "Policy", &w.Policy) delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &w.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &w.Type) delete(rawMsg, key) @@ -50678,6 +51846,9 @@ func (w WebHookActivity) MarshalJSON() ([]byte, error) { populate(objectMap, "dependsOn", w.DependsOn) populate(objectMap, "description", w.Description) populate(objectMap, "name", w.Name) + populate(objectMap, "onInactiveMarkAs", w.OnInactiveMarkAs) + populate(objectMap, "policy", w.Policy) + populate(objectMap, "state", w.State) objectMap["type"] = "WebHook" populate(objectMap, "typeProperties", w.TypeProperties) populate(objectMap, "userProperties", w.UserProperties) @@ -50707,6 +51878,15 @@ func (w *WebHookActivity) UnmarshalJSON(data []byte) error { case "name": err = unpopulate(val, "Name", &w.Name) delete(rawMsg, key) + case 
"onInactiveMarkAs": + err = unpopulate(val, "OnInactiveMarkAs", &w.OnInactiveMarkAs) + delete(rawMsg, key) + case "policy": + err = unpopulate(val, "Policy", &w.Policy) + delete(rawMsg, key) + case "state": + err = unpopulate(val, "State", &w.State) + delete(rawMsg, key) case "type": err = unpopulate(val, "Type", &w.Type) delete(rawMsg, key) @@ -51400,7 +52580,7 @@ func (x XeroLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populateAny(objectMap, "connectionProperties", x.ConnectionProperties) populate(objectMap, "consumerKey", x.ConsumerKey) - populateAny(objectMap, "encryptedCredential", x.EncryptedCredential) + populate(objectMap, "encryptedCredential", x.EncryptedCredential) populateAny(objectMap, "host", x.Host) populate(objectMap, "privateKey", x.PrivateKey) populateAny(objectMap, "useEncryptedEndpoints", x.UseEncryptedEndpoints) @@ -51661,7 +52841,7 @@ func (z ZendeskLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "apiToken", z.APIToken) populate(objectMap, "authenticationType", z.AuthenticationType) - populateAny(objectMap, "encryptedCredential", z.EncryptedCredential) + populate(objectMap, "encryptedCredential", z.EncryptedCredential) populate(objectMap, "password", z.Password) populateAny(objectMap, "url", z.URL) populateAny(objectMap, "userName", z.UserName) @@ -51816,7 +52996,7 @@ func (z ZohoLinkedServiceTypeProperties) MarshalJSON() ([]byte, error) { objectMap := make(map[string]any) populate(objectMap, "accessToken", z.AccessToken) populateAny(objectMap, "connectionProperties", z.ConnectionProperties) - populateAny(objectMap, "encryptedCredential", z.EncryptedCredential) + populate(objectMap, "encryptedCredential", z.EncryptedCredential) populateAny(objectMap, "endpoint", z.Endpoint) populateAny(objectMap, "useEncryptedEndpoints", z.UseEncryptedEndpoints) populateAny(objectMap, "useHostVerification", z.UseHostVerification) diff --git a/sdk/resourcemanager/datafactory/armdatafactory/operations_client.go b/sdk/resourcemanager/datafactory/armdatafactory/operations_client.go index 9d8805492df6..2c665478fd44 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/operations_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/operations_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory diff --git a/sdk/resourcemanager/datafactory/armdatafactory/operations_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/operations_client_example_test.go deleted file mode 100644 index fe5b40456363..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/operations_client_example_test.go +++ /dev/null @@ -1,1044 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
- -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Operations_List.json -func ExampleOperationsClient_NewListPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewOperationsClient().NewListPager(nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.OperationListResponse = armdatafactory.OperationListResponse{ - // Value: []*armdatafactory.Operation{ - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/providers/Microsoft.Insights/metricDefinitions/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Gets the available metrics for datafactories"), - // Operation: to.Ptr("Read datafactories metric definitions"), - // Provider: to.Ptr("Microsoft.DataFactory"), - // Resource: to.Ptr("datafactories"), - // }, - // Origin: to.Ptr("system"), - // Properties: &armdatafactory.OperationProperties{ - // ServiceSpecification: &armdatafactory.OperationServiceSpecification{ - // MetricSpecifications: []*armdatafactory.OperationMetricSpecification{ - // { - // Name: to.Ptr("FailedRuns"), - // AggregationType: to.Ptr("Total"), - // Availabilities: []*armdatafactory.OperationMetricAvailability{ - // { - // BlobDuration: to.Ptr("P1D"), - // TimeGrain: to.Ptr("PT1H"), - // }}, - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("pipelineName"), - // DisplayName: to.Ptr("Pipeline"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("activityName"), - // DisplayName: to.Ptr("Activity"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("windowEnd"), - // DisplayName: to.Ptr("Window End"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("windowStart"), - // DisplayName: to.Ptr("Window Start"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: to.Ptr("Failed Runs"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetricsV1"), - // Unit: to.Ptr("Count"), - // }, - // { - // Name: to.Ptr("SuccessfulRuns"), - // AggregationType: to.Ptr("Total"), - // Availabilities: []*armdatafactory.OperationMetricAvailability{ - // { - // BlobDuration: to.Ptr("P1D"), - // TimeGrain: to.Ptr("PT1H"), - // }}, - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("pipelineName"), - // DisplayName: to.Ptr("Pipeline"), - // ToBeExportedForShoebox: to.Ptr(true), - 
// }, - // { - // Name: to.Ptr("activityName"), - // DisplayName: to.Ptr("Activity"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("windowEnd"), - // DisplayName: to.Ptr("Window End"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("windowStart"), - // DisplayName: to.Ptr("Window Start"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: to.Ptr("Successful Runs"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetricsV1"), - // Unit: to.Ptr("Count"), - // }}, - // }, - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/providers/Microsoft.Insights/diagnosticSettings/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Gets the diagnostic setting for the resource"), - // Operation: to.Ptr("Read diagnostic setting"), - // Provider: to.Ptr("Microsoft.DataFactory"), - // Resource: to.Ptr("datafactories"), - // }, - // Origin: to.Ptr("system"), - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/providers/Microsoft.Insights/diagnosticSettings/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or updates the diagnostic setting for the resource"), - // Operation: to.Ptr("Write diagnostic setting"), - // Provider: to.Ptr("Microsoft.DataFactory"), - // Resource: to.Ptr("datafactories"), - // }, - // Origin: to.Ptr("system"), - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/providers/Microsoft.Insights/logDefinitions/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Gets the available logs for factories"), - // Operation: to.Ptr("Read factories log definitions"), - // Provider: to.Ptr("Microsoft.DataFactory"), - // Resource: to.Ptr("The log definition of factories"), - // }, - // Origin: to.Ptr("system"), - // Properties: &armdatafactory.OperationProperties{ - // ServiceSpecification: &armdatafactory.OperationServiceSpecification{ - // LogSpecifications: []*armdatafactory.OperationLogSpecification{ - // { - // Name: to.Ptr("ActivityRuns"), - // BlobDuration: to.Ptr("PT1H"), - // DisplayName: to.Ptr("Pipeline activity runs log"), - // }, - // { - // Name: to.Ptr("PipelineRuns"), - // BlobDuration: to.Ptr("PT1H"), - // DisplayName: to.Ptr("Pipeline runs log"), - // }, - // { - // Name: to.Ptr("TriggerRuns"), - // BlobDuration: to.Ptr("PT1H"), - // DisplayName: to.Ptr("Trigger runs log"), - // }}, - // }, - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/providers/Microsoft.Insights/diagnosticSettings/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Gets the diagnostic setting for the resource"), - // Operation: to.Ptr("Read diagnostic setting"), - // Provider: to.Ptr("Microsoft.DataFactory"), - // Resource: to.Ptr("factories"), - // }, - // Origin: to.Ptr("system"), - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/providers/Microsoft.Insights/diagnosticSettings/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or updates the diagnostic setting for the resource"), - // Operation: to.Ptr("Write diagnostic setting"), - // Provider: to.Ptr("Microsoft.DataFactory"), - // Resource: to.Ptr("factories"), - // }, - // Origin: to.Ptr("system"), - // }, - // { - // Name: 
to.Ptr("Microsoft.DataFactory/factories/providers/Microsoft.Insights/metricDefinitions/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Gets the available metrics for factories"), - // Operation: to.Ptr("Read metric definitions"), - // Provider: to.Ptr("Microsoft.DataFactory"), - // Resource: to.Ptr("factories"), - // }, - // Origin: to.Ptr("system"), - // Properties: &armdatafactory.OperationProperties{ - // ServiceSpecification: &armdatafactory.OperationServiceSpecification{ - // MetricSpecifications: []*armdatafactory.OperationMetricSpecification{ - // { - // Name: to.Ptr("PipelineFailedRuns"), - // AggregationType: to.Ptr("Total"), - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("FailureType"), - // DisplayName: to.Ptr("Failure Type"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("Name"), - // DisplayName: to.Ptr("Pipeline"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: to.Ptr("Failed pipeline runs metrics"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetrics"), - // Unit: to.Ptr("Count"), - // }, - // { - // Name: to.Ptr("PipelineSucceededRuns"), - // AggregationType: to.Ptr("Total"), - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("FailureType"), - // DisplayName: to.Ptr("Failure Type"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("Name"), - // DisplayName: to.Ptr("Pipeline"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: to.Ptr("Succeeded pipeline runs metrics"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetrics"), - // Unit: to.Ptr("Count"), - // }, - // { - // Name: to.Ptr("ActivityFailedRuns"), - // AggregationType: to.Ptr("Total"), - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("ActivityType"), - // DisplayName: to.Ptr("Activity Type"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("PipelineName"), - // DisplayName: to.Ptr("Pipeline"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("FailureType"), - // DisplayName: to.Ptr("Failure Type"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("Name"), - // DisplayName: to.Ptr("Activity"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: to.Ptr("Failed activity runs metrics"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetrics"), - // Unit: to.Ptr("Count"), - // }, - // { - // Name: to.Ptr("ActivitySucceededRuns"), - // AggregationType: to.Ptr("Total"), - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("ActivityType"), - // DisplayName: to.Ptr("Activity Type"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("PipelineName"), - // DisplayName: to.Ptr("Pipeline"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("FailureType"), - // DisplayName: to.Ptr("Failure Type"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("Name"), - // DisplayName: to.Ptr("Activity"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: 
to.Ptr("Succeeded activity runs metrics"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetrics"), - // Unit: to.Ptr("Count"), - // }, - // { - // Name: to.Ptr("TriggerFailedRuns"), - // AggregationType: to.Ptr("Total"), - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("Name"), - // DisplayName: to.Ptr("Trigger"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("FailureType"), - // DisplayName: to.Ptr("Failure Type"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: to.Ptr("Failed trigger runs metrics"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetrics"), - // Unit: to.Ptr("Count"), - // }, - // { - // Name: to.Ptr("TriggerSucceededRuns"), - // AggregationType: to.Ptr("Total"), - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("Name"), - // DisplayName: to.Ptr("Trigger"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("FailureType"), - // DisplayName: to.Ptr("Failure Type"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: to.Ptr("Succeeded trigger runs metrics"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetrics"), - // Unit: to.Ptr("Count"), - // }, - // { - // Name: to.Ptr("IntegrationRuntimeCpuPercentage"), - // AggregationType: to.Ptr("Average"), - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("IntegrationRuntimeName"), - // DisplayName: to.Ptr("Integration Runtime"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("NodeName"), - // DisplayName: to.Ptr("Integration Runtime Node"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: to.Ptr("Integration runtime CPU utilization"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetrics"), - // Unit: to.Ptr("Percent"), - // }, - // { - // Name: to.Ptr("IntegrationRuntimeAvailableMemory"), - // AggregationType: to.Ptr("Average"), - // Dimensions: []*armdatafactory.OperationMetricDimension{ - // { - // Name: to.Ptr("IntegrationRuntimeName"), - // DisplayName: to.Ptr("Integration Runtime"), - // ToBeExportedForShoebox: to.Ptr(true), - // }, - // { - // Name: to.Ptr("NodeName"), - // DisplayName: to.Ptr("Integration Runtime Node"), - // ToBeExportedForShoebox: to.Ptr(true), - // }}, - // DisplayName: to.Ptr("Integration runtime available memory"), - // EnableRegionalMdmAccount: to.Ptr("false"), - // SourceMdmAccount: to.Ptr("MicrosoftDataFactoryProdShoebox"), - // SourceMdmNamespace: to.Ptr("ADFMetrics"), - // Unit: to.Ptr("Bytes"), - // }}, - // }, - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/register/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Registers the subscription for the Data Factory Resource Provider."), - // Operation: to.Ptr("Register Data Factory Resource Provider"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory Resource Provider"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/unregister/action"), - // Display: &armdatafactory.OperationDisplay{ - 
// Description: to.Ptr("Unregisters the subscription for the Data Factory Resource Provider."), - // Operation: to.Ptr("Unregister Data Factory Resource Provider"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory Resource Provider"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads the Data Factory."), - // Operation: to.Ptr("Read Data Factory"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or Updates the Data Factory."), - // Operation: to.Ptr("Create or Update Data Factory"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes the Data Factory."), - // Operation: to.Ptr("Delete Data Factory"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/linkedServices/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads any Linked Service."), - // Operation: to.Ptr("Read Linked Service"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Linked Service"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/linkedServices/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes any Linked Service."), - // Operation: to.Ptr("Delete Linked Service"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Linked Service"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/linkedServices/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or Updates any Linked Service."), - // Operation: to.Ptr("Create or Update Linked Service"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Linked Service"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datapipelines/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads any Pipeline."), - // Operation: to.Ptr("Read Pipeline"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datapipelines/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes any Pipeline."), - // Operation: to.Ptr("Delete Pipeline"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datapipelines/pause/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Pauses any Pipeline."), - // Operation: to.Ptr("Pause Pipeline"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datapipelines/resume/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Resumes any Pipeline."), - // Operation: to.Ptr("Resume Pipeline"), - 
// Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datapipelines/update/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Updates any Pipeline."), - // Operation: to.Ptr("Update Pipeline"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datapipelines/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or Updates any Pipeline."), - // Operation: to.Ptr("Create or Update Pipeline"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datasets/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads any Dataset."), - // Operation: to.Ptr("Read Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Dataset"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datasets/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes any Dataset."), - // Operation: to.Ptr("Delete Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Dataset"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datasets/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or Updates any Dataset."), - // Operation: to.Ptr("Create or Update Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Dataset"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/tables/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads any Dataset."), - // Operation: to.Ptr("Read Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Dataset"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/tables/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes any Dataset."), - // Operation: to.Ptr("Delete Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Dataset"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/tables/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or Updates any Dataset."), - // Operation: to.Ptr("Create or Update Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Dataset"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datasets/slices/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Gets the Data Slices in the given period."), - // Operation: to.Ptr("Read Data Slices"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Slice"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datasets/slices/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Update the Status of the Data Slice."), - // Operation: to.Ptr("Update Data Slice Status"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Slice"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datasets/sliceruns/read"), - // Display: 
&armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads the Data Slice Run for the given dataset with the given start time."), - // Operation: to.Ptr("Read Data Slice Run"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Slice"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/runs/loginfo/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads a SAS URI to a blob container containing the logs."), - // Operation: to.Ptr("Read Run Log Info"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Run Log"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/activitywindows/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads Activity Windows in the Data Factory with specified parameters."), - // Operation: to.Ptr("Read Activity Windows in any Data Factory"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Activity Windows"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datapipelines/activitywindows/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads Activity Windows for the Pipeline with specified parameters."), - // Operation: to.Ptr("Read Activity Windows for any Pipeline"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Activity Windows"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datapipelines/activities/activitywindows/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads Activity Windows for the Pipeline Activity with specified parameters."), - // Operation: to.Ptr("Read Activity Windows for any Pipeline Activity"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Activity Windows"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/datasets/activitywindows/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads Activity Windows for the Dataset with specified parameters."), - // Operation: to.Ptr("Read Activity Windows for any Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Activity Windows"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/gateways/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads any Gateway."), - // Operation: to.Ptr("Read Gateway"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Gateway"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/gateways/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or Updates any Gateway."), - // Operation: to.Ptr("Create or Update Gateway"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Gateway"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/gateways/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes any Gateway."), - // Operation: to.Ptr("Delete Gateway"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Gateway"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/gateways/connectioninfo/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads the Connection Info for any Gateway."), - // Operation: to.Ptr("Read 
Gateway Connection Info"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Gateway"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/gateways/listauthkeys/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Lists the Authentication Keys for any Gateway."), - // Operation: to.Ptr("List Gateway Authentication Keys"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Gateway"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/datafactories/gateways/regenerateauthkey/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Regenerates the Authentication Keys for any Gateway."), - // Operation: to.Ptr("Regenerate Gateway Authentication Keys"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Gateway"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads Data Factory."), - // Operation: to.Ptr("Read Data Factory"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Create or Update Data Factory"), - // Operation: to.Ptr("Create or Update any Data Factory."), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes Data Factory."), - // Operation: to.Ptr("Delete Data Factory"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/linkedServices/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads Linked Service."), - // Operation: to.Ptr("Read Linked Service"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Linked Service"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/linkedServices/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes Linked Service."), - // Operation: to.Ptr("Delete Linked Service"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Linked Service"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/linkedServices/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Create or Update Linked Service"), - // Operation: to.Ptr("Create or Update any Linked Service."), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Linked Service"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/pipelines/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads Pipeline."), - // Operation: to.Ptr("Read Pipeline"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/pipelines/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes Pipeline."), - // Operation: to.Ptr("Delete Pipeline"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: 
to.Ptr("Microsoft.DataFactory/factories/pipelines/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Create or Update Pipeline"), - // Operation: to.Ptr("Create or Update any Pipeline."), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/pipelines/createrun/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates a run for the Pipeline."), - // Operation: to.Ptr("Create Pipeline Run"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Pipeline"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/cancelpipelinerun/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Cancels the pipeline run specified by the run ID."), - // Operation: to.Ptr("Cancel any Pipeline Run"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/pipelineruns/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads the Pipeline Runs."), - // Operation: to.Ptr("Read Pipeline Runs"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/pipelineruns/activityruns/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads the activity runs for the specified pipeline run ID."), - // Operation: to.Ptr("Read Activity Runs"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/datasets/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads any Dataset."), - // Operation: to.Ptr("Read Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Dataset"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/datasets/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes any Dataset."), - // Operation: to.Ptr("Delete Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Dataset"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/datasets/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or Updates any Dataset."), - // Operation: to.Ptr("Create or Update Dataset"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Dataset"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads any Integration Runtime."), - // Operation: to.Ptr("Read Integration Runtime"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or Updates any Integration Runtime."), - // Operation: to.Ptr("Create or Update Integration Runtime"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/delete"), - // Display: 
&armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes any Integration Runtime."), - // Operation: to.Ptr("Delete Integration Runtime"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/start/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Starts any Integration Runtime."), - // Operation: to.Ptr("Start Integration Runtime"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/stop/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Stops any Integration Runtime."), - // Operation: to.Ptr("Stop Integration Runtime"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/getconnectioninfo/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads Integration Runtime Connection Info."), - // Operation: to.Ptr("Read Integration Runtime Connection Info"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/getstatus/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads Integration Runtime Status."), - // Operation: to.Ptr("Read Integration Runtime Status"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/listauthkeys/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Lists the Authentication Keys for any Integration Runtime."), - // Operation: to.Ptr("List Integration Runtime Authentication Keys"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/monitoringdata/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Gets the Monitoring Data for any Integration Runtime."), - // Operation: to.Ptr("Get Integration Runtime Monitoring Data"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/nodes/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes the Node for the specified Integration Runtime."), - // Operation: to.Ptr("Delete Integration Runtime Node"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/nodes/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Updates a self-hosted Integration Runtime Node."), - // Operation: to.Ptr("Update Integration Runtime Node"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/nodes/ipAddress/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: 
to.Ptr("Returns the IP Address for the specified node of the Integration Runtime."), - // Operation: to.Ptr("Read Integration Runtime Node IP Address"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/synccredentials/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Syncs the Credentials for the specified Integration Runtime."), - // Operation: to.Ptr("Sync Credentials for Integration Runtime"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/upgrade/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Upgrades the specified Integration Runtime."), - // Operation: to.Ptr("Upgrade Integration Runtime"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/integrationruntimes/regenerateauthkey/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Regenerates the Authentication Keys for the specified Integration Runtime."), - // Operation: to.Ptr("Regenerate Integration Runtime Authentication Keys"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Integration Runtime"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/triggers/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads any Trigger."), - // Operation: to.Ptr("Read Trigger"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Trigger"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/triggers/write"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Creates or Updates any Trigger."), - // Operation: to.Ptr("Create or Update Trigger"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Trigger"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/triggers/delete"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Deletes any Trigger."), - // Operation: to.Ptr("Delete Trigger"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Trigger"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/triggers/start/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Starts any Trigger."), - // Operation: to.Ptr("Start Trigger"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Trigger"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/triggers/stop/action"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Stops any Trigger."), - // Operation: to.Ptr("Stop Trigger"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Trigger"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/factories/triggers/triggerruns/read"), - // Display: &armdatafactory.OperationDisplay{ - // Description: to.Ptr("Reads the Trigger Runs."), - // Operation: to.Ptr("Read Trigger Runs"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }, - // { - // Name: to.Ptr("Microsoft.DataFactory/locations/configureFactoryRepo/action"), - // Display: 
&armdatafactory.OperationDisplay{ - // Description: to.Ptr("Configures the repository for the factory."), - // Operation: to.Ptr("Configure Factory Repository"), - // Provider: to.Ptr("Microsoft Data Factory"), - // Resource: to.Ptr("Data Factory"), - // }, - // }}, - // } - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/options.go b/sdk/resourcemanager/datafactory/armdatafactory/options.go new file mode 100644 index 000000000000..19b4d1c6d0ba --- /dev/null +++ b/sdk/resourcemanager/datafactory/armdatafactory/options.go @@ -0,0 +1,642 @@ +//go:build go1.18 +// +build go1.18 + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +package armdatafactory + +// ActivityRunsClientQueryByPipelineRunOptions contains the optional parameters for the ActivityRunsClient.QueryByPipelineRun +// method. +type ActivityRunsClientQueryByPipelineRunOptions struct { + // placeholder for future optional parameters +} + +// ChangeDataCaptureClientCreateOrUpdateOptions contains the optional parameters for the ChangeDataCaptureClient.CreateOrUpdate +// method. +type ChangeDataCaptureClientCreateOrUpdateOptions struct { + // ETag of the change data capture entity. Should only be specified for update, for which it should match existing entity + // or can be * for unconditional update. + IfMatch *string +} + +// ChangeDataCaptureClientDeleteOptions contains the optional parameters for the ChangeDataCaptureClient.Delete method. +type ChangeDataCaptureClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// ChangeDataCaptureClientGetOptions contains the optional parameters for the ChangeDataCaptureClient.Get method. +type ChangeDataCaptureClientGetOptions struct { + // ETag of the change data capture entity. Should only be specified for get. If the ETag matches the existing entity tag, + // or if * was provided, then no content will be returned. + IfNoneMatch *string +} + +// ChangeDataCaptureClientListByFactoryOptions contains the optional parameters for the ChangeDataCaptureClient.NewListByFactoryPager +// method. +type ChangeDataCaptureClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// ChangeDataCaptureClientStartOptions contains the optional parameters for the ChangeDataCaptureClient.Start method. +type ChangeDataCaptureClientStartOptions struct { + // placeholder for future optional parameters +} + +// ChangeDataCaptureClientStatusOptions contains the optional parameters for the ChangeDataCaptureClient.Status method. +type ChangeDataCaptureClientStatusOptions struct { + // placeholder for future optional parameters +} + +// ChangeDataCaptureClientStopOptions contains the optional parameters for the ChangeDataCaptureClient.Stop method. +type ChangeDataCaptureClientStopOptions struct { + // placeholder for future optional parameters +} + +// CredentialOperationsClientCreateOrUpdateOptions contains the optional parameters for the CredentialOperationsClient.CreateOrUpdate +// method. +type CredentialOperationsClientCreateOrUpdateOptions struct { + // ETag of the credential entity. Should only be specified for update, for which it should match existing entity or can be + // * for unconditional update. 
+ IfMatch *string +} + +// CredentialOperationsClientDeleteOptions contains the optional parameters for the CredentialOperationsClient.Delete method. +type CredentialOperationsClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// CredentialOperationsClientGetOptions contains the optional parameters for the CredentialOperationsClient.Get method. +type CredentialOperationsClientGetOptions struct { + // ETag of the credential entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was + // provided, then no content will be returned. + IfNoneMatch *string +} + +// CredentialOperationsClientListByFactoryOptions contains the optional parameters for the CredentialOperationsClient.NewListByFactoryPager +// method. +type CredentialOperationsClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// DataFlowDebugSessionClientAddDataFlowOptions contains the optional parameters for the DataFlowDebugSessionClient.AddDataFlow +// method. +type DataFlowDebugSessionClientAddDataFlowOptions struct { + // placeholder for future optional parameters +} + +// DataFlowDebugSessionClientBeginCreateOptions contains the optional parameters for the DataFlowDebugSessionClient.BeginCreate +// method. +type DataFlowDebugSessionClientBeginCreateOptions struct { + // Resumes the LRO from the provided token. + ResumeToken string +} + +// DataFlowDebugSessionClientBeginExecuteCommandOptions contains the optional parameters for the DataFlowDebugSessionClient.BeginExecuteCommand +// method. +type DataFlowDebugSessionClientBeginExecuteCommandOptions struct { + // Resumes the LRO from the provided token. + ResumeToken string +} + +// DataFlowDebugSessionClientDeleteOptions contains the optional parameters for the DataFlowDebugSessionClient.Delete method. +type DataFlowDebugSessionClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// DataFlowDebugSessionClientQueryByFactoryOptions contains the optional parameters for the DataFlowDebugSessionClient.NewQueryByFactoryPager +// method. +type DataFlowDebugSessionClientQueryByFactoryOptions struct { + // placeholder for future optional parameters +} + +// DataFlowsClientCreateOrUpdateOptions contains the optional parameters for the DataFlowsClient.CreateOrUpdate method. +type DataFlowsClientCreateOrUpdateOptions struct { + // ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be + // * for unconditional update. + IfMatch *string +} + +// DataFlowsClientDeleteOptions contains the optional parameters for the DataFlowsClient.Delete method. +type DataFlowsClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// DataFlowsClientGetOptions contains the optional parameters for the DataFlowsClient.Get method. +type DataFlowsClientGetOptions struct { + // ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was + // provided, then no content will be returned. + IfNoneMatch *string +} + +// DataFlowsClientListByFactoryOptions contains the optional parameters for the DataFlowsClient.NewListByFactoryPager method. +type DataFlowsClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// DatasetsClientCreateOrUpdateOptions contains the optional parameters for the DatasetsClient.CreateOrUpdate method. 
+type DatasetsClientCreateOrUpdateOptions struct { + // ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * + // for unconditional update. + IfMatch *string +} + +// DatasetsClientDeleteOptions contains the optional parameters for the DatasetsClient.Delete method. +type DatasetsClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// DatasetsClientGetOptions contains the optional parameters for the DatasetsClient.Get method. +type DatasetsClientGetOptions struct { + // ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was + // provided, then no content will be returned. + IfNoneMatch *string +} + +// DatasetsClientListByFactoryOptions contains the optional parameters for the DatasetsClient.NewListByFactoryPager method. +type DatasetsClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// ExposureControlClientGetFeatureValueByFactoryOptions contains the optional parameters for the ExposureControlClient.GetFeatureValueByFactory +// method. +type ExposureControlClientGetFeatureValueByFactoryOptions struct { + // placeholder for future optional parameters +} + +// ExposureControlClientGetFeatureValueOptions contains the optional parameters for the ExposureControlClient.GetFeatureValue +// method. +type ExposureControlClientGetFeatureValueOptions struct { + // placeholder for future optional parameters +} + +// ExposureControlClientQueryFeatureValuesByFactoryOptions contains the optional parameters for the ExposureControlClient.QueryFeatureValuesByFactory +// method. +type ExposureControlClientQueryFeatureValuesByFactoryOptions struct { + // placeholder for future optional parameters +} + +// FactoriesClientConfigureFactoryRepoOptions contains the optional parameters for the FactoriesClient.ConfigureFactoryRepo +// method. +type FactoriesClientConfigureFactoryRepoOptions struct { + // placeholder for future optional parameters +} + +// FactoriesClientCreateOrUpdateOptions contains the optional parameters for the FactoriesClient.CreateOrUpdate method. +type FactoriesClientCreateOrUpdateOptions struct { + // ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * + // for unconditional update. + IfMatch *string +} + +// FactoriesClientDeleteOptions contains the optional parameters for the FactoriesClient.Delete method. +type FactoriesClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// FactoriesClientGetDataPlaneAccessOptions contains the optional parameters for the FactoriesClient.GetDataPlaneAccess method. +type FactoriesClientGetDataPlaneAccessOptions struct { + // placeholder for future optional parameters +} + +// FactoriesClientGetGitHubAccessTokenOptions contains the optional parameters for the FactoriesClient.GetGitHubAccessToken +// method. +type FactoriesClientGetGitHubAccessTokenOptions struct { + // placeholder for future optional parameters +} + +// FactoriesClientGetOptions contains the optional parameters for the FactoriesClient.Get method. +type FactoriesClientGetOptions struct { + // ETag of the factory entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was + // provided, then no content will be returned. 
+ IfNoneMatch *string +} + +// FactoriesClientListByResourceGroupOptions contains the optional parameters for the FactoriesClient.NewListByResourceGroupPager +// method. +type FactoriesClientListByResourceGroupOptions struct { + // placeholder for future optional parameters +} + +// FactoriesClientListOptions contains the optional parameters for the FactoriesClient.NewListPager method. +type FactoriesClientListOptions struct { + // placeholder for future optional parameters +} + +// FactoriesClientUpdateOptions contains the optional parameters for the FactoriesClient.Update method. +type FactoriesClientUpdateOptions struct { + // placeholder for future optional parameters +} + +// GlobalParametersClientCreateOrUpdateOptions contains the optional parameters for the GlobalParametersClient.CreateOrUpdate +// method. +type GlobalParametersClientCreateOrUpdateOptions struct { + // placeholder for future optional parameters +} + +// GlobalParametersClientDeleteOptions contains the optional parameters for the GlobalParametersClient.Delete method. +type GlobalParametersClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// GlobalParametersClientGetOptions contains the optional parameters for the GlobalParametersClient.Get method. +type GlobalParametersClientGetOptions struct { + // placeholder for future optional parameters +} + +// GlobalParametersClientListByFactoryOptions contains the optional parameters for the GlobalParametersClient.NewListByFactoryPager +// method. +type GlobalParametersClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimeNodesClientDeleteOptions contains the optional parameters for the IntegrationRuntimeNodesClient.Delete +// method. +type IntegrationRuntimeNodesClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimeNodesClientGetIPAddressOptions contains the optional parameters for the IntegrationRuntimeNodesClient.GetIPAddress +// method. +type IntegrationRuntimeNodesClientGetIPAddressOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimeNodesClientGetOptions contains the optional parameters for the IntegrationRuntimeNodesClient.Get method. +type IntegrationRuntimeNodesClientGetOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimeNodesClientUpdateOptions contains the optional parameters for the IntegrationRuntimeNodesClient.Update +// method. +type IntegrationRuntimeNodesClientUpdateOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimeObjectMetadataClientBeginRefreshOptions contains the optional parameters for the IntegrationRuntimeObjectMetadataClient.BeginRefresh +// method. +type IntegrationRuntimeObjectMetadataClientBeginRefreshOptions struct { + // Resumes the LRO from the provided token. + ResumeToken string +} + +// IntegrationRuntimeObjectMetadataClientGetOptions contains the optional parameters for the IntegrationRuntimeObjectMetadataClient.Get +// method. +type IntegrationRuntimeObjectMetadataClientGetOptions struct { + // The parameters for getting a SSIS object metadata. + GetMetadataRequest *GetSsisObjectMetadataRequest +} + +// IntegrationRuntimesClientBeginStartOptions contains the optional parameters for the IntegrationRuntimesClient.BeginStart +// method. +type IntegrationRuntimesClientBeginStartOptions struct { + // Resumes the LRO from the provided token. 
+ ResumeToken string +} + +// IntegrationRuntimesClientBeginStopOptions contains the optional parameters for the IntegrationRuntimesClient.BeginStop +// method. +type IntegrationRuntimesClientBeginStopOptions struct { + // Resumes the LRO from the provided token. + ResumeToken string +} + +// IntegrationRuntimesClientCreateLinkedIntegrationRuntimeOptions contains the optional parameters for the IntegrationRuntimesClient.CreateLinkedIntegrationRuntime +// method. +type IntegrationRuntimesClientCreateLinkedIntegrationRuntimeOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientCreateOrUpdateOptions contains the optional parameters for the IntegrationRuntimesClient.CreateOrUpdate +// method. +type IntegrationRuntimesClientCreateOrUpdateOptions struct { + // ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity + // or can be * for unconditional update. + IfMatch *string +} + +// IntegrationRuntimesClientDeleteOptions contains the optional parameters for the IntegrationRuntimesClient.Delete method. +type IntegrationRuntimesClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientGetConnectionInfoOptions contains the optional parameters for the IntegrationRuntimesClient.GetConnectionInfo +// method. +type IntegrationRuntimesClientGetConnectionInfoOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientGetMonitoringDataOptions contains the optional parameters for the IntegrationRuntimesClient.GetMonitoringData +// method. +type IntegrationRuntimesClientGetMonitoringDataOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientGetOptions contains the optional parameters for the IntegrationRuntimesClient.Get method. +type IntegrationRuntimesClientGetOptions struct { + // ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, + // or if * was provided, then no content will be returned. + IfNoneMatch *string +} + +// IntegrationRuntimesClientGetStatusOptions contains the optional parameters for the IntegrationRuntimesClient.GetStatus +// method. +type IntegrationRuntimesClientGetStatusOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientListAuthKeysOptions contains the optional parameters for the IntegrationRuntimesClient.ListAuthKeys +// method. +type IntegrationRuntimesClientListAuthKeysOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientListByFactoryOptions contains the optional parameters for the IntegrationRuntimesClient.NewListByFactoryPager +// method. +type IntegrationRuntimesClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsOptions contains the optional parameters for the IntegrationRuntimesClient.ListOutboundNetworkDependenciesEndpoints +// method. +type IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientRegenerateAuthKeyOptions contains the optional parameters for the IntegrationRuntimesClient.RegenerateAuthKey +// method. 
+type IntegrationRuntimesClientRegenerateAuthKeyOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientRemoveLinksOptions contains the optional parameters for the IntegrationRuntimesClient.RemoveLinks +// method. +type IntegrationRuntimesClientRemoveLinksOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientSyncCredentialsOptions contains the optional parameters for the IntegrationRuntimesClient.SyncCredentials +// method. +type IntegrationRuntimesClientSyncCredentialsOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientUpdateOptions contains the optional parameters for the IntegrationRuntimesClient.Update method. +type IntegrationRuntimesClientUpdateOptions struct { + // placeholder for future optional parameters +} + +// IntegrationRuntimesClientUpgradeOptions contains the optional parameters for the IntegrationRuntimesClient.Upgrade method. +type IntegrationRuntimesClientUpgradeOptions struct { + // placeholder for future optional parameters +} + +// LinkedServicesClientCreateOrUpdateOptions contains the optional parameters for the LinkedServicesClient.CreateOrUpdate +// method. +type LinkedServicesClientCreateOrUpdateOptions struct { + // ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can + // be * for unconditional update. + IfMatch *string +} + +// LinkedServicesClientDeleteOptions contains the optional parameters for the LinkedServicesClient.Delete method. +type LinkedServicesClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// LinkedServicesClientGetOptions contains the optional parameters for the LinkedServicesClient.Get method. +type LinkedServicesClientGetOptions struct { + // ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if + // * was provided, then no content will be returned. + IfNoneMatch *string +} + +// LinkedServicesClientListByFactoryOptions contains the optional parameters for the LinkedServicesClient.NewListByFactoryPager +// method. +type LinkedServicesClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// ManagedPrivateEndpointsClientCreateOrUpdateOptions contains the optional parameters for the ManagedPrivateEndpointsClient.CreateOrUpdate +// method. +type ManagedPrivateEndpointsClientCreateOrUpdateOptions struct { + // ETag of the managed private endpoint entity. Should only be specified for update, for which it should match existing entity + // or can be * for unconditional update. + IfMatch *string +} + +// ManagedPrivateEndpointsClientDeleteOptions contains the optional parameters for the ManagedPrivateEndpointsClient.Delete +// method. +type ManagedPrivateEndpointsClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// ManagedPrivateEndpointsClientGetOptions contains the optional parameters for the ManagedPrivateEndpointsClient.Get method. +type ManagedPrivateEndpointsClientGetOptions struct { + // ETag of the managed private endpoint entity. Should only be specified for get. If the ETag matches the existing entity + // tag, or if * was provided, then no content will be returned. + IfNoneMatch *string +} + +// ManagedPrivateEndpointsClientListByFactoryOptions contains the optional parameters for the ManagedPrivateEndpointsClient.NewListByFactoryPager +// method. 
+type ManagedPrivateEndpointsClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// ManagedVirtualNetworksClientCreateOrUpdateOptions contains the optional parameters for the ManagedVirtualNetworksClient.CreateOrUpdate +// method. +type ManagedVirtualNetworksClientCreateOrUpdateOptions struct { + // ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity + // or can be * for unconditional update. + IfMatch *string +} + +// ManagedVirtualNetworksClientGetOptions contains the optional parameters for the ManagedVirtualNetworksClient.Get method. +type ManagedVirtualNetworksClientGetOptions struct { + // ETag of the managed Virtual Network entity. Should only be specified for get. If the ETag matches the existing entity tag, + // or if * was provided, then no content will be returned. + IfNoneMatch *string +} + +// ManagedVirtualNetworksClientListByFactoryOptions contains the optional parameters for the ManagedVirtualNetworksClient.NewListByFactoryPager +// method. +type ManagedVirtualNetworksClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// OperationsClientListOptions contains the optional parameters for the OperationsClient.NewListPager method. +type OperationsClientListOptions struct { + // placeholder for future optional parameters +} + +// PipelineRunsClientCancelOptions contains the optional parameters for the PipelineRunsClient.Cancel method. +type PipelineRunsClientCancelOptions struct { + // If true, cancel all the Child pipelines that are triggered by the current pipeline. + IsRecursive *bool +} + +// PipelineRunsClientGetOptions contains the optional parameters for the PipelineRunsClient.Get method. +type PipelineRunsClientGetOptions struct { + // placeholder for future optional parameters +} + +// PipelineRunsClientQueryByFactoryOptions contains the optional parameters for the PipelineRunsClient.QueryByFactory method. +type PipelineRunsClientQueryByFactoryOptions struct { + // placeholder for future optional parameters +} + +// PipelinesClientCreateOrUpdateOptions contains the optional parameters for the PipelinesClient.CreateOrUpdate method. +type PipelinesClientCreateOrUpdateOptions struct { + // ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * + // for unconditional update. + IfMatch *string +} + +// PipelinesClientCreateRunOptions contains the optional parameters for the PipelinesClient.CreateRun method. +type PipelinesClientCreateRunOptions struct { + // Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped + // under the same groupId. + IsRecovery *bool + + // Parameters of the pipeline run. These parameters will be used only if the runId is not specified. + Parameters map[string]any + + // The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. + ReferencePipelineRunID *string + + // In recovery mode, the rerun will start from this activity. If not specified, all activities will run. + StartActivityName *string + + // In recovery mode, if set to true, the rerun will start from failed activities. The property will be used only if startActivityName + // is not specified. + StartFromFailure *bool +} + +// PipelinesClientDeleteOptions contains the optional parameters for the PipelinesClient.Delete method. 
+type PipelinesClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// PipelinesClientGetOptions contains the optional parameters for the PipelinesClient.Get method. +type PipelinesClientGetOptions struct { + // ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was + // provided, then no content will be returned. + IfNoneMatch *string +} + +// PipelinesClientListByFactoryOptions contains the optional parameters for the PipelinesClient.NewListByFactoryPager method. +type PipelinesClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// PrivateEndPointConnectionsClientListByFactoryOptions contains the optional parameters for the PrivateEndPointConnectionsClient.NewListByFactoryPager +// method. +type PrivateEndPointConnectionsClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// PrivateEndpointConnectionClientCreateOrUpdateOptions contains the optional parameters for the PrivateEndpointConnectionClient.CreateOrUpdate +// method. +type PrivateEndpointConnectionClientCreateOrUpdateOptions struct { + // ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing + // entity or can be * for unconditional update. + IfMatch *string +} + +// PrivateEndpointConnectionClientDeleteOptions contains the optional parameters for the PrivateEndpointConnectionClient.Delete +// method. +type PrivateEndpointConnectionClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// PrivateEndpointConnectionClientGetOptions contains the optional parameters for the PrivateEndpointConnectionClient.Get +// method. +type PrivateEndpointConnectionClientGetOptions struct { + // ETag of the private endpoint connection entity. Should only be specified for get. If the ETag matches the existing entity + // tag, or if * was provided, then no content will be returned. + IfNoneMatch *string +} + +// PrivateLinkResourcesClientGetOptions contains the optional parameters for the PrivateLinkResourcesClient.Get method. +type PrivateLinkResourcesClientGetOptions struct { + // placeholder for future optional parameters +} + +// TriggerRunsClientCancelOptions contains the optional parameters for the TriggerRunsClient.Cancel method. +type TriggerRunsClientCancelOptions struct { + // placeholder for future optional parameters +} + +// TriggerRunsClientQueryByFactoryOptions contains the optional parameters for the TriggerRunsClient.QueryByFactory method. +type TriggerRunsClientQueryByFactoryOptions struct { + // placeholder for future optional parameters +} + +// TriggerRunsClientRerunOptions contains the optional parameters for the TriggerRunsClient.Rerun method. +type TriggerRunsClientRerunOptions struct { + // placeholder for future optional parameters +} + +// TriggersClientBeginStartOptions contains the optional parameters for the TriggersClient.BeginStart method. +type TriggersClientBeginStartOptions struct { + // Resumes the LRO from the provided token. + ResumeToken string +} + +// TriggersClientBeginStopOptions contains the optional parameters for the TriggersClient.BeginStop method. +type TriggersClientBeginStopOptions struct { + // Resumes the LRO from the provided token. + ResumeToken string +} + +// TriggersClientBeginSubscribeToEventsOptions contains the optional parameters for the TriggersClient.BeginSubscribeToEvents +// method. 
+type TriggersClientBeginSubscribeToEventsOptions struct { + // Resumes the LRO from the provided token. + ResumeToken string +} + +// TriggersClientBeginUnsubscribeFromEventsOptions contains the optional parameters for the TriggersClient.BeginUnsubscribeFromEvents +// method. +type TriggersClientBeginUnsubscribeFromEventsOptions struct { + // Resumes the LRO from the provided token. + ResumeToken string +} + +// TriggersClientCreateOrUpdateOptions contains the optional parameters for the TriggersClient.CreateOrUpdate method. +type TriggersClientCreateOrUpdateOptions struct { + // ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * + // for unconditional update. + IfMatch *string +} + +// TriggersClientDeleteOptions contains the optional parameters for the TriggersClient.Delete method. +type TriggersClientDeleteOptions struct { + // placeholder for future optional parameters +} + +// TriggersClientGetEventSubscriptionStatusOptions contains the optional parameters for the TriggersClient.GetEventSubscriptionStatus +// method. +type TriggersClientGetEventSubscriptionStatusOptions struct { + // placeholder for future optional parameters +} + +// TriggersClientGetOptions contains the optional parameters for the TriggersClient.Get method. +type TriggersClientGetOptions struct { + // ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was + // provided, then no content will be returned. + IfNoneMatch *string +} + +// TriggersClientListByFactoryOptions contains the optional parameters for the TriggersClient.NewListByFactoryPager method. +type TriggersClientListByFactoryOptions struct { + // placeholder for future optional parameters +} + +// TriggersClientQueryByFactoryOptions contains the optional parameters for the TriggersClient.QueryByFactory method. +type TriggersClientQueryByFactoryOptions struct { + // placeholder for future optional parameters +} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/pipelineruns_client.go b/sdk/resourcemanager/datafactory/armdatafactory/pipelineruns_client.go index be67fd2e6255..53fd90c09068 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/pipelineruns_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/pipelineruns_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -54,16 +53,18 @@ func NewPipelineRunsClient(subscriptionID string, credential azcore.TokenCredent // - runID - The pipeline run identifier. // - options - PipelineRunsClientCancelOptions contains the optional parameters for the PipelineRunsClient.Cancel method. 
func (client *PipelineRunsClient) Cancel(ctx context.Context, resourceGroupName string, factoryName string, runID string, options *PipelineRunsClientCancelOptions) (PipelineRunsClientCancelResponse, error) { + var err error req, err := client.cancelCreateRequest(ctx, resourceGroupName, factoryName, runID, options) if err != nil { return PipelineRunsClientCancelResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PipelineRunsClientCancelResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return PipelineRunsClientCancelResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return PipelineRunsClientCancelResponse{}, err } return PipelineRunsClientCancelResponse{}, nil } @@ -110,18 +111,21 @@ func (client *PipelineRunsClient) cancelCreateRequest(ctx context.Context, resou // - runID - The pipeline run identifier. // - options - PipelineRunsClientGetOptions contains the optional parameters for the PipelineRunsClient.Get method. func (client *PipelineRunsClient) Get(ctx context.Context, resourceGroupName string, factoryName string, runID string, options *PipelineRunsClientGetOptions) (PipelineRunsClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, runID, options) if err != nil { return PipelineRunsClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PipelineRunsClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return PipelineRunsClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return PipelineRunsClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. @@ -173,18 +177,21 @@ func (client *PipelineRunsClient) getHandleResponse(resp *http.Response) (Pipeli // - options - PipelineRunsClientQueryByFactoryOptions contains the optional parameters for the PipelineRunsClient.QueryByFactory // method. func (client *PipelineRunsClient) QueryByFactory(ctx context.Context, resourceGroupName string, factoryName string, filterParameters RunFilterParameters, options *PipelineRunsClientQueryByFactoryOptions) (PipelineRunsClientQueryByFactoryResponse, error) { + var err error req, err := client.queryByFactoryCreateRequest(ctx, resourceGroupName, factoryName, filterParameters, options) if err != nil { return PipelineRunsClientQueryByFactoryResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PipelineRunsClientQueryByFactoryResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return PipelineRunsClientQueryByFactoryResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return PipelineRunsClientQueryByFactoryResponse{}, err } - return client.queryByFactoryHandleResponse(resp) + resp, err := client.queryByFactoryHandleResponse(httpResp) + return resp, err } // queryByFactoryCreateRequest creates the QueryByFactory request. 
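For orientation, a minimal caller-side sketch of what the regenerated pattern above means in practice: non-2xx responses are now wrapped by runtime.NewResponseError and surface to the caller as an *azcore.ResponseError, which can be inspected with errors.As; the optional IsRecursive flag from PipelineRunsClientCancelOptions is shown as well. This is an illustrative sketch, not part of the patch; the subscription ID, resource group, factory name, and run ID are placeholder values.

// error_handling_sketch.go (illustrative only; placeholder identifiers)
package main

import (
	"context"
	"errors"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3"
)

func main() {
	cred, err := azidentity.NewDefaultAzureCredential(nil)
	if err != nil {
		log.Fatalf("failed to obtain a credential: %v", err)
	}
	// Placeholder subscription ID; the client constructor is the one shown in this diff.
	client, err := armdatafactory.NewPipelineRunsClient("<subscription-id>", cred, nil)
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}
	_, err = client.Cancel(context.Background(),
		"exampleResourceGroup", "exampleFactoryName",
		"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",
		&armdatafactory.PipelineRunsClientCancelOptions{IsRecursive: to.Ptr(true)})
	if err != nil {
		// Non-2xx responses come back as *azcore.ResponseError (via runtime.NewResponseError).
		var respErr *azcore.ResponseError
		if errors.As(err, &respErr) {
			log.Printf("service returned HTTP %d (error code %q)", respErr.StatusCode, respErr.ErrorCode)
			return
		}
		log.Fatalf("transport error: %v", err)
	}
}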
@@ -210,7 +217,10 @@ func (client *PipelineRunsClient) queryByFactoryCreateRequest(ctx context.Contex reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, filterParameters) + if err := runtime.MarshalAsJSON(req, filterParameters); err != nil { + return nil, err + } + return req, nil } // queryByFactoryHandleResponse handles the QueryByFactory response. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/pipelineruns_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/pipelineruns_client_example_test.go deleted file mode 100644 index 33db4884da8c..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/pipelineruns_client_example_test.go +++ /dev/null @@ -1,158 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "time" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_QueryByFactory.json -func ExamplePipelineRunsClient_QueryByFactory() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewPipelineRunsClient().QueryByFactory(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.RunFilterParameters{ - Filters: []*armdatafactory.RunQueryFilter{ - { - Operand: to.Ptr(armdatafactory.RunQueryFilterOperandPipelineName), - Operator: to.Ptr(armdatafactory.RunQueryFilterOperatorEquals), - Values: []*string{ - to.Ptr("examplePipeline")}, - }}, - LastUpdatedAfter: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:36:44.3345758Z"); return t }()), - LastUpdatedBefore: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:49:48.3686473Z"); return t }()), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.PipelineRunsQueryResponse = armdatafactory.PipelineRunsQueryResponse{ - // Value: []*armdatafactory.PipelineRun{ - // { - // AdditionalProperties: map[string]any{ - // "annotations": []any{ - // }, - // "runDimension": map[string]any{ - // "JobId": "79c1cc52-265f-41a5-9553-be65e736fbd3", - // }, - // }, - // DurationInMs: to.Ptr[int32](28105), - // InvokedBy: &armdatafactory.PipelineRunInvokedBy{ - // Name: to.Ptr("Manual"), - // ID: to.Ptr("80a01654a9d34ad18b3fcac5d5d76b67"), - // }, - // LastUpdated: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:38:12.7314495Z"); return t}()), - // Message: to.Ptr(""), - // Parameters: map[string]*string{ - // "OutputBlobNameList": to.Ptr("[\"exampleoutput.csv\"]"), - // }, - // PipelineName: to.Ptr("examplePipeline"), - // RunEnd: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:38:12.7314495Z"); return t}()), - // RunID: to.Ptr("2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"), - // RunStart: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:37:44.6257014Z"); return t}()), - // Status: to.Ptr("Succeeded"), - // }, - // { - // AdditionalProperties: map[string]any{ - // "annotations": []any{ - // }, - // "runDimension": map[string]any{ - // "JobId": "84a3c493-0628-4b44-852f-ef5b3a11bdab", - // }, - // }, - // InvokedBy: &armdatafactory.PipelineRunInvokedBy{ - // Name: to.Ptr("Manual"), - // ID: to.Ptr("7c5fd7ef7e8a464b98b931cf15fcac66"), - // }, - // LastUpdated: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:39:51.216097Z"); return t}()), - // Message: to.Ptr(""), - // Parameters: map[string]*string{ - // "OutputBlobNameList": to.Ptr("[\"exampleoutput.csv\"]"), - // }, - // PipelineName: to.Ptr("examplePipeline"), - // RunID: to.Ptr("16ac5348-ff82-4f95-a80d-638c1d47b721"), - // RunStart: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:39:49.2745128Z"); return t}()), - // Status: to.Ptr("Cancelled"), - // }}, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Get.json -func ExamplePipelineRunsClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewPipelineRunsClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.PipelineRun = armdatafactory.PipelineRun{ - // AdditionalProperties: map[string]any{ - // "annotations": []any{ - // }, - // }, - // DurationInMs: to.Ptr[int32](28105), - // InvokedBy: &armdatafactory.PipelineRunInvokedBy{ - // Name: to.Ptr("Manual"), - // ID: to.Ptr("80a01654a9d34ad18b3fcac5d5d76b67"), - // }, - // LastUpdated: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:38:12.7314495Z"); return t}()), - // Message: to.Ptr(""), - // Parameters: map[string]*string{ - // "OutputBlobNameList": to.Ptr("[\"exampleoutput.csv\"]"), - // }, - // PipelineName: to.Ptr("examplePipeline"), - // RunEnd: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:38:12.7314495Z"); return t}()), - // RunID: to.Ptr("2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"), - // RunStart: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:37:44.6257014Z"); return t}()), - // Status: to.Ptr("Succeeded"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PipelineRuns_Cancel.json -func ExamplePipelineRunsClient_Cancel() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewPipelineRunsClient().Cancel(ctx, "exampleResourceGroup", "exampleFactoryName", "16ac5348-ff82-4f95-a80d-638c1d47b721", &armdatafactory.PipelineRunsClientCancelOptions{IsRecursive: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/pipelines_client.go b/sdk/resourcemanager/datafactory/armdatafactory/pipelines_client.go index ff86c5c2ed87..3faa8b898d1a 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/pipelines_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/pipelines_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -56,18 +55,21 @@ func NewPipelinesClient(subscriptionID string, credential azcore.TokenCredential // - options - PipelinesClientCreateOrUpdateOptions contains the optional parameters for the PipelinesClient.CreateOrUpdate // method. 
func (client *PipelinesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, pipelineName string, pipeline PipelineResource, options *PipelinesClientCreateOrUpdateOptions) (PipelinesClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, pipelineName, pipeline, options) if err != nil { return PipelinesClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PipelinesClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return PipelinesClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return PipelinesClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -100,7 +102,10 @@ func (client *PipelinesClient) createOrUpdateCreateRequest(ctx context.Context, req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, pipeline) + if err := runtime.MarshalAsJSON(req, pipeline); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -121,18 +126,21 @@ func (client *PipelinesClient) createOrUpdateHandleResponse(resp *http.Response) // - pipelineName - The pipeline name. // - options - PipelinesClientCreateRunOptions contains the optional parameters for the PipelinesClient.CreateRun method. func (client *PipelinesClient) CreateRun(ctx context.Context, resourceGroupName string, factoryName string, pipelineName string, options *PipelinesClientCreateRunOptions) (PipelinesClientCreateRunResponse, error) { + var err error req, err := client.createRunCreateRequest(ctx, resourceGroupName, factoryName, pipelineName, options) if err != nil { return PipelinesClientCreateRunResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PipelinesClientCreateRunResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return PipelinesClientCreateRunResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return PipelinesClientCreateRunResponse{}, err } - return client.createRunHandleResponse(resp) + resp, err := client.createRunHandleResponse(httpResp) + return resp, err } // createRunCreateRequest creates the CreateRun request. @@ -175,7 +183,10 @@ func (client *PipelinesClient) createRunCreateRequest(ctx context.Context, resou req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} if options != nil && options.Parameters != nil { - return req, runtime.MarshalAsJSON(req, options.Parameters) + if err := runtime.MarshalAsJSON(req, options.Parameters); err != nil { + return nil, err + } + return req, nil } return req, nil } @@ -198,16 +209,18 @@ func (client *PipelinesClient) createRunHandleResponse(resp *http.Response) (Pip // - pipelineName - The pipeline name. // - options - PipelinesClientDeleteOptions contains the optional parameters for the PipelinesClient.Delete method. 
func (client *PipelinesClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, pipelineName string, options *PipelinesClientDeleteOptions) (PipelinesClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, pipelineName, options) if err != nil { return PipelinesClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PipelinesClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return PipelinesClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return PipelinesClientDeleteResponse{}, err } return PipelinesClientDeleteResponse{}, nil } @@ -251,18 +264,21 @@ func (client *PipelinesClient) deleteCreateRequest(ctx context.Context, resource // - pipelineName - The pipeline name. // - options - PipelinesClientGetOptions contains the optional parameters for the PipelinesClient.Get method. func (client *PipelinesClient) Get(ctx context.Context, resourceGroupName string, factoryName string, pipelineName string, options *PipelinesClientGetOptions) (PipelinesClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, pipelineName, options) if err != nil { return PipelinesClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PipelinesClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNotModified) { - return PipelinesClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNotModified) { + err = runtime.NewResponseError(httpResp) + return PipelinesClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/pipelines_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/pipelines_client_example_test.go deleted file mode 100644 index 3addab4fd615..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/pipelines_client_example_test.go +++ /dev/null @@ -1,558 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
- -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_ListByFactory.json -func ExamplePipelinesClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewPipelinesClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.PipelineListResponse = armdatafactory.PipelineListResponse{ - // Value: []*armdatafactory.PipelineResource{ - // { - // Name: to.Ptr("examplePipeline"), - // Type: to.Ptr("Microsoft.DataFactory/factories/pipelines"), - // Etag: to.Ptr("0a006cd4-0000-0000-0000-5b245bd60000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/pipelines/examplePipeline"), - // Properties: &armdatafactory.Pipeline{ - // Description: to.Ptr("Example description"), - // Activities: []armdatafactory.ActivityClassification{ - // &armdatafactory.ForEachActivity{ - // Name: to.Ptr("ExampleForeachActivity"), - // Type: to.Ptr("ForEach"), - // TypeProperties: &armdatafactory.ForEachActivityTypeProperties{ - // Activities: []armdatafactory.ActivityClassification{ - // &armdatafactory.CopyActivity{ - // Name: to.Ptr("ExampleCopyActivity"), - // Type: to.Ptr("Copy"), - // Inputs: []*armdatafactory.DatasetReference{ - // { - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // Parameters: map[string]any{ - // "MyFileName": "examplecontainer.csv", - // "MyFolderPath": "examplecontainer", - // }, - // ReferenceName: to.Ptr("exampleDataset"), - // }}, - // Outputs: []*armdatafactory.DatasetReference{ - // { - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // Parameters: map[string]any{ - // "MyFileName": map[string]any{ - // "type": "Expression", - // "value": "@item()", - // }, - // "MyFolderPath": "examplecontainer", - // }, - // ReferenceName: to.Ptr("exampleDataset"), - // }}, - // TypeProperties: &armdatafactory.CopyActivityTypeProperties{ - // DataIntegrationUnits: float64(32), - // Sink: &armdatafactory.BlobSink{ - // Type: to.Ptr("BlobSink"), - // }, - // Source: &armdatafactory.BlobSource{ - // Type: to.Ptr("BlobSource"), - // }, - // }, - // }}, - // IsSequential: to.Ptr(true), - // Items: &armdatafactory.Expression{ - // Type: to.Ptr(armdatafactory.ExpressionTypeExpression), - // Value: 
to.Ptr("@pipeline().parameters.OutputBlobNameList"), - // }, - // }, - // }}, - // Parameters: map[string]*armdatafactory.ParameterSpecification{ - // "OutputBlobNameList": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeArray), - // }, - // }, - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Create.json -func ExamplePipelinesClient_CreateOrUpdate_pipelinesCreate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewPipelinesClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "examplePipeline", armdatafactory.PipelineResource{ - Properties: &armdatafactory.Pipeline{ - Activities: []armdatafactory.ActivityClassification{ - &armdatafactory.ForEachActivity{ - Name: to.Ptr("ExampleForeachActivity"), - Type: to.Ptr("ForEach"), - TypeProperties: &armdatafactory.ForEachActivityTypeProperties{ - Activities: []armdatafactory.ActivityClassification{ - &armdatafactory.CopyActivity{ - Name: to.Ptr("ExampleCopyActivity"), - Type: to.Ptr("Copy"), - Inputs: []*armdatafactory.DatasetReference{ - { - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - Parameters: map[string]any{ - "MyFileName": "examplecontainer.csv", - "MyFolderPath": "examplecontainer", - }, - ReferenceName: to.Ptr("exampleDataset"), - }}, - Outputs: []*armdatafactory.DatasetReference{ - { - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - Parameters: map[string]any{ - "MyFileName": map[string]any{ - "type": "Expression", - "value": "@item()", - }, - "MyFolderPath": "examplecontainer", - }, - ReferenceName: to.Ptr("exampleDataset"), - }}, - TypeProperties: &armdatafactory.CopyActivityTypeProperties{ - DataIntegrationUnits: float64(32), - Sink: &armdatafactory.BlobSink{ - Type: to.Ptr("BlobSink"), - }, - Source: &armdatafactory.BlobSource{ - Type: to.Ptr("BlobSource"), - }, - }, - }}, - IsSequential: to.Ptr(true), - Items: &armdatafactory.Expression{ - Type: to.Ptr(armdatafactory.ExpressionTypeExpression), - Value: to.Ptr("@pipeline().parameters.OutputBlobNameList"), - }, - }, - }}, - Parameters: map[string]*armdatafactory.ParameterSpecification{ - "JobId": { - Type: to.Ptr(armdatafactory.ParameterTypeString), - }, - "OutputBlobNameList": { - Type: to.Ptr(armdatafactory.ParameterTypeArray), - }, - }, - Policy: &armdatafactory.PipelinePolicy{ - ElapsedTimeMetric: &armdatafactory.PipelineElapsedTimeMetricPolicy{ - Duration: "0.00:10:00", - }, - }, - RunDimensions: map[string]any{ - "JobId": map[string]any{ - "type": "Expression", - "value": "@pipeline().parameters.JobId", - }, - }, - Variables: map[string]*armdatafactory.VariableSpecification{ - "TestVariableArray": { - Type: to.Ptr(armdatafactory.VariableTypeArray), - }, - }, - }, - }, &armdatafactory.PipelinesClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. 
- _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.PipelineResource = armdatafactory.PipelineResource{ - // Name: to.Ptr("examplePipeline"), - // Type: to.Ptr("Microsoft.DataFactory/factories/pipelines"), - // Etag: to.Ptr("0a0069d4-0000-0000-0000-5b245bd50000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/pipelines/examplePipeline"), - // Properties: &armdatafactory.Pipeline{ - // Activities: []armdatafactory.ActivityClassification{ - // &armdatafactory.ForEachActivity{ - // Name: to.Ptr("ExampleForeachActivity"), - // Type: to.Ptr("ForEach"), - // TypeProperties: &armdatafactory.ForEachActivityTypeProperties{ - // Activities: []armdatafactory.ActivityClassification{ - // &armdatafactory.CopyActivity{ - // Name: to.Ptr("ExampleCopyActivity"), - // Type: to.Ptr("Copy"), - // Inputs: []*armdatafactory.DatasetReference{ - // { - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // Parameters: map[string]any{ - // "MyFileName": "examplecontainer.csv", - // "MyFolderPath": "examplecontainer", - // }, - // ReferenceName: to.Ptr("exampleDataset"), - // }}, - // Outputs: []*armdatafactory.DatasetReference{ - // { - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // Parameters: map[string]any{ - // "MyFileName": map[string]any{ - // "type": "Expression", - // "value": "@item()", - // }, - // "MyFolderPath": "examplecontainer", - // }, - // ReferenceName: to.Ptr("exampleDataset"), - // }}, - // TypeProperties: &armdatafactory.CopyActivityTypeProperties{ - // DataIntegrationUnits: float64(32), - // Sink: &armdatafactory.BlobSink{ - // Type: to.Ptr("BlobSink"), - // }, - // Source: &armdatafactory.BlobSource{ - // Type: to.Ptr("BlobSource"), - // }, - // }, - // }}, - // IsSequential: to.Ptr(true), - // Items: &armdatafactory.Expression{ - // Type: to.Ptr(armdatafactory.ExpressionTypeExpression), - // Value: to.Ptr("@pipeline().parameters.OutputBlobNameList"), - // }, - // }, - // }}, - // Parameters: map[string]*armdatafactory.ParameterSpecification{ - // "JobId": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeString), - // }, - // "OutputBlobNameList": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeArray), - // }, - // }, - // RunDimensions: map[string]any{ - // "JobId": map[string]any{ - // "type": "Expression", - // "value": "@pipeline().parameters.JobId", - // }, - // }, - // Variables: map[string]*armdatafactory.VariableSpecification{ - // "TestVariableArray": &armdatafactory.VariableSpecification{ - // Type: to.Ptr(armdatafactory.VariableTypeArray), - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Update.json -func ExamplePipelinesClient_CreateOrUpdate_pipelinesUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: 
%v", err) - } - res, err := clientFactory.NewPipelinesClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "examplePipeline", armdatafactory.PipelineResource{ - Properties: &armdatafactory.Pipeline{ - Description: to.Ptr("Example description"), - Activities: []armdatafactory.ActivityClassification{ - &armdatafactory.ForEachActivity{ - Name: to.Ptr("ExampleForeachActivity"), - Type: to.Ptr("ForEach"), - TypeProperties: &armdatafactory.ForEachActivityTypeProperties{ - Activities: []armdatafactory.ActivityClassification{ - &armdatafactory.CopyActivity{ - Name: to.Ptr("ExampleCopyActivity"), - Type: to.Ptr("Copy"), - Inputs: []*armdatafactory.DatasetReference{ - { - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - Parameters: map[string]any{ - "MyFileName": "examplecontainer.csv", - "MyFolderPath": "examplecontainer", - }, - ReferenceName: to.Ptr("exampleDataset"), - }}, - Outputs: []*armdatafactory.DatasetReference{ - { - Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - Parameters: map[string]any{ - "MyFileName": map[string]any{ - "type": "Expression", - "value": "@item()", - }, - "MyFolderPath": "examplecontainer", - }, - ReferenceName: to.Ptr("exampleDataset"), - }}, - TypeProperties: &armdatafactory.CopyActivityTypeProperties{ - DataIntegrationUnits: float64(32), - Sink: &armdatafactory.BlobSink{ - Type: to.Ptr("BlobSink"), - }, - Source: &armdatafactory.BlobSource{ - Type: to.Ptr("BlobSource"), - }, - }, - }}, - IsSequential: to.Ptr(true), - Items: &armdatafactory.Expression{ - Type: to.Ptr(armdatafactory.ExpressionTypeExpression), - Value: to.Ptr("@pipeline().parameters.OutputBlobNameList"), - }, - }, - }}, - Parameters: map[string]*armdatafactory.ParameterSpecification{ - "OutputBlobNameList": { - Type: to.Ptr(armdatafactory.ParameterTypeArray), - }, - }, - Policy: &armdatafactory.PipelinePolicy{ - ElapsedTimeMetric: &armdatafactory.PipelineElapsedTimeMetricPolicy{ - Duration: "0.00:10:00", - }, - }, - }, - }, &armdatafactory.PipelinesClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.PipelineResource = armdatafactory.PipelineResource{ - // Name: to.Ptr("examplePipeline"), - // Type: to.Ptr("Microsoft.DataFactory/factories/pipelines"), - // Etag: to.Ptr("0a006cd4-0000-0000-0000-5b245bd60000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/pipelines/examplePipeline"), - // Properties: &armdatafactory.Pipeline{ - // Description: to.Ptr("Example description"), - // Activities: []armdatafactory.ActivityClassification{ - // &armdatafactory.ForEachActivity{ - // Name: to.Ptr("ExampleForeachActivity"), - // Type: to.Ptr("ForEach"), - // TypeProperties: &armdatafactory.ForEachActivityTypeProperties{ - // Activities: []armdatafactory.ActivityClassification{ - // &armdatafactory.CopyActivity{ - // Name: to.Ptr("ExampleCopyActivity"), - // Type: to.Ptr("Copy"), - // Inputs: []*armdatafactory.DatasetReference{ - // { - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // Parameters: map[string]any{ - // "MyFileName": "examplecontainer.csv", - // "MyFolderPath": "examplecontainer", - // }, - // ReferenceName: to.Ptr("exampleDataset"), - // }}, - // Outputs: []*armdatafactory.DatasetReference{ - // { - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // Parameters: map[string]any{ - // "MyFileName": map[string]any{ - // "type": "Expression", - // "value": "@item()", - // }, - // "MyFolderPath": "examplecontainer", - // }, - // ReferenceName: to.Ptr("exampleDataset"), - // }}, - // TypeProperties: &armdatafactory.CopyActivityTypeProperties{ - // DataIntegrationUnits: float64(32), - // Sink: &armdatafactory.BlobSink{ - // Type: to.Ptr("BlobSink"), - // }, - // Source: &armdatafactory.BlobSource{ - // Type: to.Ptr("BlobSource"), - // }, - // }, - // }}, - // IsSequential: to.Ptr(true), - // Items: &armdatafactory.Expression{ - // Type: to.Ptr(armdatafactory.ExpressionTypeExpression), - // Value: to.Ptr("@pipeline().parameters.OutputBlobNameList"), - // }, - // }, - // }}, - // Parameters: map[string]*armdatafactory.ParameterSpecification{ - // "OutputBlobNameList": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeArray), - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Get.json -func ExamplePipelinesClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewPipelinesClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "examplePipeline", &armdatafactory.PipelinesClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.PipelineResource = armdatafactory.PipelineResource{ - // Name: to.Ptr("examplePipeline"), - // Type: to.Ptr("Microsoft.DataFactory/factories/pipelines"), - // Etag: to.Ptr("1500504f-0000-0200-0000-5cbe090f0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/pipelines/examplePipeline"), - // Properties: &armdatafactory.Pipeline{ - // Description: to.Ptr("Example description"), - // Activities: []armdatafactory.ActivityClassification{ - // &armdatafactory.ForEachActivity{ - // Name: to.Ptr("ExampleForeachActivity"), - // Type: to.Ptr("ForEach"), - // TypeProperties: &armdatafactory.ForEachActivityTypeProperties{ - // Activities: []armdatafactory.ActivityClassification{ - // &armdatafactory.CopyActivity{ - // Name: to.Ptr("ExampleCopyActivity"), - // Type: to.Ptr("Copy"), - // Inputs: []*armdatafactory.DatasetReference{ - // { - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // Parameters: map[string]any{ - // "MyFileName": "examplecontainer.csv", - // "MyFolderPath": "examplecontainer", - // }, - // ReferenceName: to.Ptr("exampleDataset"), - // }}, - // Outputs: []*armdatafactory.DatasetReference{ - // { - // Type: to.Ptr(armdatafactory.DatasetReferenceTypeDatasetReference), - // Parameters: map[string]any{ - // "MyFileName": map[string]any{ - // "type": "Expression", - // "value": "@item()", - // }, - // "MyFolderPath": "examplecontainer", - // }, - // ReferenceName: to.Ptr("exampleDataset"), - // }}, - // TypeProperties: &armdatafactory.CopyActivityTypeProperties{ - // DataIntegrationUnits: float64(32), - // Sink: &armdatafactory.BlobSink{ - // Type: to.Ptr("BlobSink"), - // }, - // Source: &armdatafactory.BlobSource{ - // Type: to.Ptr("BlobSource"), - // }, - // }, - // }}, - // IsSequential: to.Ptr(true), - // Items: &armdatafactory.Expression{ - // Type: to.Ptr(armdatafactory.ExpressionTypeExpression), - // Value: to.Ptr("@pipeline().parameters.OutputBlobNameList"), - // }, - // }, - // }}, - // Parameters: map[string]*armdatafactory.ParameterSpecification{ - // "OutputBlobNameList": &armdatafactory.ParameterSpecification{ - // Type: to.Ptr(armdatafactory.ParameterTypeArray), - // }, - // }, - // Policy: &armdatafactory.PipelinePolicy{ - // ElapsedTimeMetric: &armdatafactory.PipelineElapsedTimeMetricPolicy{ - // Duration: "0.00:10:00", - // }, - // }, - // Variables: map[string]*armdatafactory.VariableSpecification{ - // "TestVariableArray": &armdatafactory.VariableSpecification{ - // Type: to.Ptr(armdatafactory.VariableTypeArray), - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_Delete.json -func ExamplePipelinesClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewPipelinesClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "examplePipeline", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: 
https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Pipelines_CreateRun.json -func ExamplePipelinesClient_CreateRun() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewPipelinesClient().CreateRun(ctx, "exampleResourceGroup", "exampleFactoryName", "examplePipeline", &armdatafactory.PipelinesClientCreateRunOptions{ReferencePipelineRunID: nil, - IsRecovery: nil, - StartActivityName: nil, - StartFromFailure: nil, - Parameters: map[string]any{ - "OutputBlobNameList": []any{ - "exampleoutput.csv", - }, - }, - }) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.CreateRunResponse = armdatafactory.CreateRunResponse{ - // RunID: to.Ptr("2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"), - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/polymorphic_helpers.go b/sdk/resourcemanager/datafactory/armdatafactory/polymorphic_helpers.go index 42d366ae3711..84e5fbca54da 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/polymorphic_helpers.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/polymorphic_helpers.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
package armdatafactory @@ -106,7 +105,10 @@ func unmarshalActivityClassification(rawMsg json.RawMessage) (ActivityClassifica default: b = &Activity{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalActivityClassificationArray(rawMsg json.RawMessage) ([]ActivityClassification, error) { @@ -147,7 +149,10 @@ func unmarshalCompressionReadSettingsClassification(rawMsg json.RawMessage) (Com default: b = &CompressionReadSettings{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalCopySinkClassification(rawMsg json.RawMessage) (CopySinkClassification, error) { @@ -241,7 +246,10 @@ func unmarshalCopySinkClassification(rawMsg json.RawMessage) (CopySinkClassifica default: b = &CopySink{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalCopySourceClassification(rawMsg json.RawMessage) (CopySourceClassification, error) { @@ -449,7 +457,10 @@ func unmarshalCopySourceClassification(rawMsg json.RawMessage) (CopySourceClassi default: b = &CopySource{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalCredentialClassification(rawMsg json.RawMessage) (CredentialClassification, error) { @@ -469,7 +480,10 @@ func unmarshalCredentialClassification(rawMsg json.RawMessage) (CredentialClassi default: b = &Credential{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalCustomSetupBaseClassification(rawMsg json.RawMessage) (CustomSetupBaseClassification, error) { @@ -493,7 +507,10 @@ func unmarshalCustomSetupBaseClassification(rawMsg json.RawMessage) (CustomSetup default: b = &CustomSetupBase{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalCustomSetupBaseClassificationArray(rawMsg json.RawMessage) ([]CustomSetupBaseClassification, error) { @@ -534,7 +551,10 @@ func unmarshalDataFlowClassification(rawMsg json.RawMessage) (DataFlowClassifica default: b = &DataFlow{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalDatasetClassification(rawMsg json.RawMessage) (DatasetClassification, error) { @@ -742,7 +762,10 @@ func unmarshalDatasetClassification(rawMsg json.RawMessage) (DatasetClassificati default: b = &Dataset{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalDatasetLocationClassification(rawMsg json.RawMessage) (DatasetLocationClassification, error) { @@ -784,7 +807,10 @@ func unmarshalDatasetLocationClassification(rawMsg json.RawMessage) (DatasetLoca default: b = &DatasetLocation{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalDatasetStorageFormatClassification(rawMsg json.RawMessage) (DatasetStorageFormatClassification, error) { @@ -810,7 +836,10 @@ func unmarshalDatasetStorageFormatClassification(rawMsg json.RawMessage) (Datase default: b = &DatasetStorageFormat{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != 
nil { + return nil, err + } + return b, nil } func unmarshalDependencyReferenceClassification(rawMsg json.RawMessage) (DependencyReferenceClassification, error) { @@ -832,7 +861,10 @@ func unmarshalDependencyReferenceClassification(rawMsg json.RawMessage) (Depende default: b = &DependencyReference{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalDependencyReferenceClassificationArray(rawMsg json.RawMessage) ([]DependencyReferenceClassification, error) { @@ -871,7 +903,10 @@ func unmarshalFactoryRepoConfigurationClassification(rawMsg json.RawMessage) (Fa default: b = &FactoryRepoConfiguration{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalFormatReadSettingsClassification(rawMsg json.RawMessage) (FormatReadSettingsClassification, error) { @@ -890,12 +925,17 @@ func unmarshalFormatReadSettingsClassification(rawMsg json.RawMessage) (FormatRe b = &DelimitedTextReadSettings{} case "JsonReadSettings": b = &JSONReadSettings{} + case "ParquetReadSettings": + b = &ParquetReadSettings{} case "XmlReadSettings": b = &XMLReadSettings{} default: b = &FormatReadSettings{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalIntegrationRuntimeClassification(rawMsg json.RawMessage) (IntegrationRuntimeClassification, error) { @@ -915,7 +955,10 @@ func unmarshalIntegrationRuntimeClassification(rawMsg json.RawMessage) (Integrat default: b = &IntegrationRuntime{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalIntegrationRuntimeStatusClassification(rawMsg json.RawMessage) (IntegrationRuntimeStatusClassification, error) { @@ -935,7 +978,10 @@ func unmarshalIntegrationRuntimeStatusClassification(rawMsg json.RawMessage) (In default: b = &IntegrationRuntimeStatus{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalLinkedIntegrationRuntimeTypeClassification(rawMsg json.RawMessage) (LinkedIntegrationRuntimeTypeClassification, error) { @@ -955,7 +1001,10 @@ func unmarshalLinkedIntegrationRuntimeTypeClassification(rawMsg json.RawMessage) default: b = &LinkedIntegrationRuntimeType{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalLinkedServiceClassification(rawMsg json.RawMessage) (LinkedServiceClassification, error) { @@ -1197,7 +1246,10 @@ func unmarshalLinkedServiceClassification(rawMsg json.RawMessage) (LinkedService default: b = &LinkedService{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalSecretBaseClassification(rawMsg json.RawMessage) (SecretBaseClassification, error) { @@ -1217,7 +1269,10 @@ func unmarshalSecretBaseClassification(rawMsg json.RawMessage) (SecretBaseClassi default: b = &SecretBase{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalSsisObjectMetadataClassification(rawMsg json.RawMessage) (SsisObjectMetadataClassification, error) { @@ -1241,7 +1296,10 @@ func unmarshalSsisObjectMetadataClassification(rawMsg json.RawMessage) 
(SsisObje default: b = &SsisObjectMetadata{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalSsisObjectMetadataClassificationArray(rawMsg json.RawMessage) ([]SsisObjectMetadataClassification, error) { @@ -1302,7 +1360,10 @@ func unmarshalStoreReadSettingsClassification(rawMsg json.RawMessage) (StoreRead default: b = &StoreReadSettings{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalStoreWriteSettingsClassification(rawMsg json.RawMessage) (StoreWriteSettingsClassification, error) { @@ -1330,7 +1391,10 @@ func unmarshalStoreWriteSettingsClassification(rawMsg json.RawMessage) (StoreWri default: b = &StoreWriteSettings{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalTriggerClassification(rawMsg json.RawMessage) (TriggerClassification, error) { @@ -1362,7 +1426,10 @@ func unmarshalTriggerClassification(rawMsg json.RawMessage) (TriggerClassificati default: b = &Trigger{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } func unmarshalWebLinkedServiceTypePropertiesClassification(rawMsg json.RawMessage) (WebLinkedServiceTypePropertiesClassification, error) { @@ -1384,5 +1451,8 @@ func unmarshalWebLinkedServiceTypePropertiesClassification(rawMsg json.RawMessag default: b = &WebLinkedServiceTypeProperties{} } - return b, json.Unmarshal(rawMsg, b) + if err := json.Unmarshal(rawMsg, b); err != nil { + return nil, err + } + return b, nil } diff --git a/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnection_client.go b/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnection_client.go index 09a0fac07c98..9768153d306b 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnection_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnection_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -54,18 +53,21 @@ func NewPrivateEndpointConnectionClient(subscriptionID string, credential azcore // - options - PrivateEndpointConnectionClientCreateOrUpdateOptions contains the optional parameters for the PrivateEndpointConnectionClient.CreateOrUpdate // method. 
func (client *PrivateEndpointConnectionClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, privateEndpointConnectionName string, privateEndpointWrapper PrivateLinkConnectionApprovalRequestResource, options *PrivateEndpointConnectionClientCreateOrUpdateOptions) (PrivateEndpointConnectionClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, privateEndpointConnectionName, privateEndpointWrapper, options) if err != nil { return PrivateEndpointConnectionClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PrivateEndpointConnectionClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return PrivateEndpointConnectionClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return PrivateEndpointConnectionClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -98,7 +100,10 @@ func (client *PrivateEndpointConnectionClient) createOrUpdateCreateRequest(ctx c req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, privateEndpointWrapper) + if err := runtime.MarshalAsJSON(req, privateEndpointWrapper); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -120,16 +125,18 @@ func (client *PrivateEndpointConnectionClient) createOrUpdateHandleResponse(resp // - options - PrivateEndpointConnectionClientDeleteOptions contains the optional parameters for the PrivateEndpointConnectionClient.Delete // method. func (client *PrivateEndpointConnectionClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, privateEndpointConnectionName string, options *PrivateEndpointConnectionClientDeleteOptions) (PrivateEndpointConnectionClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, privateEndpointConnectionName, options) if err != nil { return PrivateEndpointConnectionClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PrivateEndpointConnectionClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return PrivateEndpointConnectionClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return PrivateEndpointConnectionClientDeleteResponse{}, err } return PrivateEndpointConnectionClientDeleteResponse{}, nil } @@ -174,18 +181,21 @@ func (client *PrivateEndpointConnectionClient) deleteCreateRequest(ctx context.C // - options - PrivateEndpointConnectionClientGetOptions contains the optional parameters for the PrivateEndpointConnectionClient.Get // method. 
func (client *PrivateEndpointConnectionClient) Get(ctx context.Context, resourceGroupName string, factoryName string, privateEndpointConnectionName string, options *PrivateEndpointConnectionClientGetOptions) (PrivateEndpointConnectionClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, privateEndpointConnectionName, options) if err != nil { return PrivateEndpointConnectionClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PrivateEndpointConnectionClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return PrivateEndpointConnectionClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return PrivateEndpointConnectionClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnection_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnection_client_example_test.go deleted file mode 100644 index 015e6c3eb725..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnection_client_example_test.go +++ /dev/null @@ -1,119 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
- -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/ApproveRejectPrivateEndpointConnection.json -func ExamplePrivateEndpointConnectionClient_CreateOrUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewPrivateEndpointConnectionClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "connection", armdatafactory.PrivateLinkConnectionApprovalRequestResource{ - Properties: &armdatafactory.PrivateLinkConnectionApprovalRequest{ - PrivateEndpoint: &armdatafactory.PrivateEndpoint{ - ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/privateEndpoints/myPrivateEndpoint"), - }, - PrivateLinkServiceConnectionState: &armdatafactory.PrivateLinkConnectionState{ - Description: to.Ptr("Approved by admin."), - ActionsRequired: to.Ptr(""), - Status: to.Ptr("Approved"), - }, - }, - }, &armdatafactory.PrivateEndpointConnectionClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.PrivateEndpointConnectionResource = armdatafactory.PrivateEndpointConnectionResource{ - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories/privateEndpointConnections"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Properties: &armdatafactory.RemotePrivateEndpointConnection{ - // PrivateEndpoint: &armdatafactory.ArmIDWrapper{ - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/privateEndpoints/myPrivateEndpoint"), - // }, - // PrivateLinkServiceConnectionState: &armdatafactory.PrivateLinkConnectionState{ - // Description: to.Ptr("Approved by admin."), - // ActionsRequired: to.Ptr(""), - // Status: to.Ptr("Approved"), - // }, - // ProvisioningState: to.Ptr("Succeeded"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GetPrivateEndpointConnection.json -func ExamplePrivateEndpointConnectionClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewPrivateEndpointConnectionClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "connection", &armdatafactory.PrivateEndpointConnectionClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.PrivateEndpointConnectionResource = armdatafactory.PrivateEndpointConnectionResource{ - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories/privateEndpointConnections"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Properties: &armdatafactory.RemotePrivateEndpointConnection{ - // PrivateEndpoint: &armdatafactory.ArmIDWrapper{ - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/privateEndpoints/myPrivateEndpoint"), - // }, - // PrivateLinkServiceConnectionState: &armdatafactory.PrivateLinkConnectionState{ - // Description: to.Ptr("Approved by admin."), - // ActionsRequired: to.Ptr(""), - // Status: to.Ptr("Approved"), - // }, - // ProvisioningState: to.Ptr("Succeeded"), - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/DeletePrivateEndpointConnection.json -func ExamplePrivateEndpointConnectionClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewPrivateEndpointConnectionClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "connection", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnections_client.go b/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnections_client.go index 07f0c73ed380..4ee4a389bd77 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnections_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnections_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory diff --git a/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnections_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnections_client_example_test.go deleted file mode 100644 index ae7e9262eeaa..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/privateendpointconnections_client_example_test.go +++ /dev/null @@ -1,62 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
- -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/PrivateEndPointConnections_ListByFactory.json -func ExamplePrivateEndPointConnectionsClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewPrivateEndPointConnectionsClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. - _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.PrivateEndpointConnectionListResponse = armdatafactory.PrivateEndpointConnectionListResponse{ - // Value: []*armdatafactory.PrivateEndpointConnectionResource{ - // { - // Name: to.Ptr("factories"), - // Type: to.Ptr("Microsoft.DataFactory/factories/privateEndpointConnections"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/privateEndpoints/myPrivateEndpoint"), - // Properties: &armdatafactory.RemotePrivateEndpointConnection{ - // PrivateEndpoint: &armdatafactory.ArmIDWrapper{ - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/managedVirtualNetworks/myPrivateEndpoint"), - // }, - // PrivateLinkServiceConnectionState: &armdatafactory.PrivateLinkConnectionState{ - // Description: to.Ptr("Approved by admin."), - // ActionsRequired: to.Ptr("exampleActionsRequired"), - // Status: to.Ptr("Approved"), - // }, - // ProvisioningState: to.Ptr("Succeeded"), - // }, - // }}, - // } - } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/privatelinkresources_client.go b/sdk/resourcemanager/datafactory/armdatafactory/privatelinkresources_client.go index dda1fb2b94ac..e2b427a04a46 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/privatelinkresources_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/privatelinkresources_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. 
package armdatafactory @@ -53,18 +52,21 @@ func NewPrivateLinkResourcesClient(subscriptionID string, credential azcore.Toke // - options - PrivateLinkResourcesClientGetOptions contains the optional parameters for the PrivateLinkResourcesClient.Get // method. func (client *PrivateLinkResourcesClient) Get(ctx context.Context, resourceGroupName string, factoryName string, options *PrivateLinkResourcesClientGetOptions) (PrivateLinkResourcesClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, options) if err != nil { return PrivateLinkResourcesClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return PrivateLinkResourcesClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return PrivateLinkResourcesClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return PrivateLinkResourcesClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/privatelinkresources_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/privatelinkresources_client_example_test.go deleted file mode 100644 index e0cacfeaad35..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/privatelinkresources_client_example_test.go +++ /dev/null @@ -1,53 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/GetPrivateLinkResources.json -func ExamplePrivateLinkResourcesClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewPrivateLinkResourcesClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.PrivateLinkResourcesWrapper = armdatafactory.PrivateLinkResourcesWrapper{ - // Value: []*armdatafactory.PrivateLinkResource{ - // { - // Name: to.Ptr("exampleFactoryName"), - // Type: to.Ptr("Microsoft.DataFactory/factories/privateLinkResources"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName"), - // Properties: &armdatafactory.PrivateLinkResourceProperties{ - // GroupID: to.Ptr("dataFactory"), - // RequiredMembers: []*string{ - // to.Ptr("dataFactory")}, - // RequiredZoneNames: []*string{ - // to.Ptr("privatelink.datafactory.azure.net")}, - // }, - // }}, - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/response_types.go b/sdk/resourcemanager/datafactory/armdatafactory/response_types.go index c36654413b2c..0f59a530d354 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/response_types.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/response_types.go @@ -3,19 +3,59 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory // ActivityRunsClientQueryByPipelineRunResponse contains the response from method ActivityRunsClient.QueryByPipelineRun. type ActivityRunsClientQueryByPipelineRunResponse struct { + // A list activity runs. ActivityRunsQueryResponse } +// ChangeDataCaptureClientCreateOrUpdateResponse contains the response from method ChangeDataCaptureClient.CreateOrUpdate. +type ChangeDataCaptureClientCreateOrUpdateResponse struct { + // Change data capture resource type. + ChangeDataCaptureResource +} + +// ChangeDataCaptureClientDeleteResponse contains the response from method ChangeDataCaptureClient.Delete. +type ChangeDataCaptureClientDeleteResponse struct { + // placeholder for future response values +} + +// ChangeDataCaptureClientGetResponse contains the response from method ChangeDataCaptureClient.Get. +type ChangeDataCaptureClientGetResponse struct { + // Change data capture resource type. + ChangeDataCaptureResource +} + +// ChangeDataCaptureClientListByFactoryResponse contains the response from method ChangeDataCaptureClient.NewListByFactoryPager. +type ChangeDataCaptureClientListByFactoryResponse struct { + // A list of change data capture resources. + ChangeDataCaptureListResponse +} + +// ChangeDataCaptureClientStartResponse contains the response from method ChangeDataCaptureClient.Start. +type ChangeDataCaptureClientStartResponse struct { + // placeholder for future response values +} + +// ChangeDataCaptureClientStatusResponse contains the response from method ChangeDataCaptureClient.Status. +type ChangeDataCaptureClientStatusResponse struct { + // Current status of the change data capture resource. + Value *string +} + +// ChangeDataCaptureClientStopResponse contains the response from method ChangeDataCaptureClient.Stop. +type ChangeDataCaptureClientStopResponse struct { + // placeholder for future response values +} + // CredentialOperationsClientCreateOrUpdateResponse contains the response from method CredentialOperationsClient.CreateOrUpdate. type CredentialOperationsClientCreateOrUpdateResponse struct { + // Credential resource type. 
ManagedIdentityCredentialResource } @@ -26,21 +66,25 @@ type CredentialOperationsClientDeleteResponse struct { // CredentialOperationsClientGetResponse contains the response from method CredentialOperationsClient.Get. type CredentialOperationsClientGetResponse struct { + // Credential resource type. ManagedIdentityCredentialResource } // CredentialOperationsClientListByFactoryResponse contains the response from method CredentialOperationsClient.NewListByFactoryPager. type CredentialOperationsClientListByFactoryResponse struct { + // A list of credential resources. CredentialListResponse } // DataFlowDebugSessionClientAddDataFlowResponse contains the response from method DataFlowDebugSessionClient.AddDataFlow. type DataFlowDebugSessionClientAddDataFlowResponse struct { + // Response body structure for starting data flow debug session. AddDataFlowToDebugSessionResponse } // DataFlowDebugSessionClientCreateResponse contains the response from method DataFlowDebugSessionClient.BeginCreate. type DataFlowDebugSessionClientCreateResponse struct { + // Response body structure for creating data flow debug session. CreateDataFlowDebugSessionResponse } @@ -51,16 +95,19 @@ type DataFlowDebugSessionClientDeleteResponse struct { // DataFlowDebugSessionClientExecuteCommandResponse contains the response from method DataFlowDebugSessionClient.BeginExecuteCommand. type DataFlowDebugSessionClientExecuteCommandResponse struct { + // Response body structure of data flow result for data preview, statistics or expression preview. DataFlowDebugCommandResponse } // DataFlowDebugSessionClientQueryByFactoryResponse contains the response from method DataFlowDebugSessionClient.NewQueryByFactoryPager. type DataFlowDebugSessionClientQueryByFactoryResponse struct { + // A list of active debug sessions. QueryDataFlowDebugSessionsResponse } // DataFlowsClientCreateOrUpdateResponse contains the response from method DataFlowsClient.CreateOrUpdate. type DataFlowsClientCreateOrUpdateResponse struct { + // Data flow resource type. DataFlowResource } @@ -71,16 +118,19 @@ type DataFlowsClientDeleteResponse struct { // DataFlowsClientGetResponse contains the response from method DataFlowsClient.Get. type DataFlowsClientGetResponse struct { + // Data flow resource type. DataFlowResource } // DataFlowsClientListByFactoryResponse contains the response from method DataFlowsClient.NewListByFactoryPager. type DataFlowsClientListByFactoryResponse struct { + // A list of data flow resources. DataFlowListResponse } // DatasetsClientCreateOrUpdateResponse contains the response from method DatasetsClient.CreateOrUpdate. type DatasetsClientCreateOrUpdateResponse struct { + // Dataset resource type. DatasetResource } @@ -91,36 +141,43 @@ type DatasetsClientDeleteResponse struct { // DatasetsClientGetResponse contains the response from method DatasetsClient.Get. type DatasetsClientGetResponse struct { + // Dataset resource type. DatasetResource } // DatasetsClientListByFactoryResponse contains the response from method DatasetsClient.NewListByFactoryPager. type DatasetsClientListByFactoryResponse struct { + // A list of dataset resources. DatasetListResponse } // ExposureControlClientGetFeatureValueByFactoryResponse contains the response from method ExposureControlClient.GetFeatureValueByFactory. type ExposureControlClientGetFeatureValueByFactoryResponse struct { + // The exposure control response. ExposureControlResponse } // ExposureControlClientGetFeatureValueResponse contains the response from method ExposureControlClient.GetFeatureValue. 
type ExposureControlClientGetFeatureValueResponse struct { + // The exposure control response. ExposureControlResponse } // ExposureControlClientQueryFeatureValuesByFactoryResponse contains the response from method ExposureControlClient.QueryFeatureValuesByFactory. type ExposureControlClientQueryFeatureValuesByFactoryResponse struct { + // A list of exposure control feature values. ExposureControlBatchResponse } // FactoriesClientConfigureFactoryRepoResponse contains the response from method FactoriesClient.ConfigureFactoryRepo. type FactoriesClientConfigureFactoryRepoResponse struct { + // Factory resource type. Factory } // FactoriesClientCreateOrUpdateResponse contains the response from method FactoriesClient.CreateOrUpdate. type FactoriesClientCreateOrUpdateResponse struct { + // Factory resource type. Factory } @@ -131,36 +188,43 @@ type FactoriesClientDeleteResponse struct { // FactoriesClientGetDataPlaneAccessResponse contains the response from method FactoriesClient.GetDataPlaneAccess. type FactoriesClientGetDataPlaneAccessResponse struct { + // Get Data Plane read only token response definition. AccessPolicyResponse } // FactoriesClientGetGitHubAccessTokenResponse contains the response from method FactoriesClient.GetGitHubAccessToken. type FactoriesClientGetGitHubAccessTokenResponse struct { + // Get GitHub access token response definition. GitHubAccessTokenResponse } // FactoriesClientGetResponse contains the response from method FactoriesClient.Get. type FactoriesClientGetResponse struct { + // Factory resource type. Factory } // FactoriesClientListByResourceGroupResponse contains the response from method FactoriesClient.NewListByResourceGroupPager. type FactoriesClientListByResourceGroupResponse struct { + // A list of factory resources. FactoryListResponse } // FactoriesClientListResponse contains the response from method FactoriesClient.NewListPager. type FactoriesClientListResponse struct { + // A list of factory resources. FactoryListResponse } // FactoriesClientUpdateResponse contains the response from method FactoriesClient.Update. type FactoriesClientUpdateResponse struct { + // Factory resource type. Factory } // GlobalParametersClientCreateOrUpdateResponse contains the response from method GlobalParametersClient.CreateOrUpdate. type GlobalParametersClientCreateOrUpdateResponse struct { + // Global parameters resource type. GlobalParameterResource } @@ -171,11 +235,13 @@ type GlobalParametersClientDeleteResponse struct { // GlobalParametersClientGetResponse contains the response from method GlobalParametersClient.Get. type GlobalParametersClientGetResponse struct { + // Global parameters resource type. GlobalParameterResource } // GlobalParametersClientListByFactoryResponse contains the response from method GlobalParametersClient.NewListByFactoryPager. type GlobalParametersClientListByFactoryResponse struct { + // A list of Global parameters. GlobalParameterListResponse } @@ -186,36 +252,43 @@ type IntegrationRuntimeNodesClientDeleteResponse struct { // IntegrationRuntimeNodesClientGetIPAddressResponse contains the response from method IntegrationRuntimeNodesClient.GetIPAddress. type IntegrationRuntimeNodesClientGetIPAddressResponse struct { + // The IP address of self-hosted integration runtime node. IntegrationRuntimeNodeIPAddress } // IntegrationRuntimeNodesClientGetResponse contains the response from method IntegrationRuntimeNodesClient.Get. type IntegrationRuntimeNodesClientGetResponse struct { + // Properties of Self-hosted integration runtime node. 
SelfHostedIntegrationRuntimeNode } // IntegrationRuntimeNodesClientUpdateResponse contains the response from method IntegrationRuntimeNodesClient.Update. type IntegrationRuntimeNodesClientUpdateResponse struct { + // Properties of Self-hosted integration runtime node. SelfHostedIntegrationRuntimeNode } // IntegrationRuntimeObjectMetadataClientGetResponse contains the response from method IntegrationRuntimeObjectMetadataClient.Get. type IntegrationRuntimeObjectMetadataClientGetResponse struct { + // A list of SSIS object metadata. SsisObjectMetadataListResponse } // IntegrationRuntimeObjectMetadataClientRefreshResponse contains the response from method IntegrationRuntimeObjectMetadataClient.BeginRefresh. type IntegrationRuntimeObjectMetadataClientRefreshResponse struct { + // The status of the operation. SsisObjectMetadataStatusResponse } // IntegrationRuntimesClientCreateLinkedIntegrationRuntimeResponse contains the response from method IntegrationRuntimesClient.CreateLinkedIntegrationRuntime. type IntegrationRuntimesClientCreateLinkedIntegrationRuntimeResponse struct { + // Integration runtime status response. IntegrationRuntimeStatusResponse } // IntegrationRuntimesClientCreateOrUpdateResponse contains the response from method IntegrationRuntimesClient.CreateOrUpdate. type IntegrationRuntimesClientCreateOrUpdateResponse struct { + // Integration runtime resource type. IntegrationRuntimeResource } @@ -226,41 +299,49 @@ type IntegrationRuntimesClientDeleteResponse struct { // IntegrationRuntimesClientGetConnectionInfoResponse contains the response from method IntegrationRuntimesClient.GetConnectionInfo. type IntegrationRuntimesClientGetConnectionInfoResponse struct { + // Connection information for encrypting the on-premises data source credentials. IntegrationRuntimeConnectionInfo } // IntegrationRuntimesClientGetMonitoringDataResponse contains the response from method IntegrationRuntimesClient.GetMonitoringData. type IntegrationRuntimesClientGetMonitoringDataResponse struct { + // Get monitoring data response. IntegrationRuntimeMonitoringData } // IntegrationRuntimesClientGetResponse contains the response from method IntegrationRuntimesClient.Get. type IntegrationRuntimesClientGetResponse struct { + // Integration runtime resource type. IntegrationRuntimeResource } // IntegrationRuntimesClientGetStatusResponse contains the response from method IntegrationRuntimesClient.GetStatus. type IntegrationRuntimesClientGetStatusResponse struct { + // Integration runtime status response. IntegrationRuntimeStatusResponse } // IntegrationRuntimesClientListAuthKeysResponse contains the response from method IntegrationRuntimesClient.ListAuthKeys. type IntegrationRuntimesClientListAuthKeysResponse struct { + // The integration runtime authentication keys. IntegrationRuntimeAuthKeys } // IntegrationRuntimesClientListByFactoryResponse contains the response from method IntegrationRuntimesClient.NewListByFactoryPager. type IntegrationRuntimesClientListByFactoryResponse struct { + // A list of integration runtime resources. IntegrationRuntimeListResponse } // IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsResponse contains the response from method IntegrationRuntimesClient.ListOutboundNetworkDependenciesEndpoints. type IntegrationRuntimesClientListOutboundNetworkDependenciesEndpointsResponse struct { + // Azure-SSIS integration runtime outbound network dependency endpoints. 
IntegrationRuntimeOutboundNetworkDependenciesEndpointsResponse } // IntegrationRuntimesClientRegenerateAuthKeyResponse contains the response from method IntegrationRuntimesClient.RegenerateAuthKey. type IntegrationRuntimesClientRegenerateAuthKeyResponse struct { + // The integration runtime authentication keys. IntegrationRuntimeAuthKeys } @@ -271,6 +352,7 @@ type IntegrationRuntimesClientRemoveLinksResponse struct { // IntegrationRuntimesClientStartResponse contains the response from method IntegrationRuntimesClient.BeginStart. type IntegrationRuntimesClientStartResponse struct { + // Integration runtime status response. IntegrationRuntimeStatusResponse } @@ -286,6 +368,7 @@ type IntegrationRuntimesClientSyncCredentialsResponse struct { // IntegrationRuntimesClientUpdateResponse contains the response from method IntegrationRuntimesClient.Update. type IntegrationRuntimesClientUpdateResponse struct { + // Integration runtime resource type. IntegrationRuntimeResource } @@ -296,6 +379,7 @@ type IntegrationRuntimesClientUpgradeResponse struct { // LinkedServicesClientCreateOrUpdateResponse contains the response from method LinkedServicesClient.CreateOrUpdate. type LinkedServicesClientCreateOrUpdateResponse struct { + // Linked service resource type. LinkedServiceResource } @@ -306,16 +390,19 @@ type LinkedServicesClientDeleteResponse struct { // LinkedServicesClientGetResponse contains the response from method LinkedServicesClient.Get. type LinkedServicesClientGetResponse struct { + // Linked service resource type. LinkedServiceResource } // LinkedServicesClientListByFactoryResponse contains the response from method LinkedServicesClient.NewListByFactoryPager. type LinkedServicesClientListByFactoryResponse struct { + // A list of linked service resources. LinkedServiceListResponse } // ManagedPrivateEndpointsClientCreateOrUpdateResponse contains the response from method ManagedPrivateEndpointsClient.CreateOrUpdate. type ManagedPrivateEndpointsClientCreateOrUpdateResponse struct { + // Managed private endpoint resource type. ManagedPrivateEndpointResource } @@ -326,31 +413,37 @@ type ManagedPrivateEndpointsClientDeleteResponse struct { // ManagedPrivateEndpointsClientGetResponse contains the response from method ManagedPrivateEndpointsClient.Get. type ManagedPrivateEndpointsClientGetResponse struct { + // Managed private endpoint resource type. ManagedPrivateEndpointResource } // ManagedPrivateEndpointsClientListByFactoryResponse contains the response from method ManagedPrivateEndpointsClient.NewListByFactoryPager. type ManagedPrivateEndpointsClientListByFactoryResponse struct { + // A list of managed private endpoint resources. ManagedPrivateEndpointListResponse } // ManagedVirtualNetworksClientCreateOrUpdateResponse contains the response from method ManagedVirtualNetworksClient.CreateOrUpdate. type ManagedVirtualNetworksClientCreateOrUpdateResponse struct { + // Managed Virtual Network resource type. ManagedVirtualNetworkResource } // ManagedVirtualNetworksClientGetResponse contains the response from method ManagedVirtualNetworksClient.Get. type ManagedVirtualNetworksClientGetResponse struct { + // Managed Virtual Network resource type. ManagedVirtualNetworkResource } // ManagedVirtualNetworksClientListByFactoryResponse contains the response from method ManagedVirtualNetworksClient.NewListByFactoryPager. type ManagedVirtualNetworksClientListByFactoryResponse struct { + // A list of managed Virtual Network resources. 
ManagedVirtualNetworkListResponse } // OperationsClientListResponse contains the response from method OperationsClient.NewListPager. type OperationsClientListResponse struct { + // A list of operations that can be performed by the Data Factory service. OperationListResponse } @@ -361,21 +454,25 @@ type PipelineRunsClientCancelResponse struct { // PipelineRunsClientGetResponse contains the response from method PipelineRunsClient.Get. type PipelineRunsClientGetResponse struct { + // Information about a pipeline run. PipelineRun } // PipelineRunsClientQueryByFactoryResponse contains the response from method PipelineRunsClient.QueryByFactory. type PipelineRunsClientQueryByFactoryResponse struct { + // A list pipeline runs. PipelineRunsQueryResponse } // PipelinesClientCreateOrUpdateResponse contains the response from method PipelinesClient.CreateOrUpdate. type PipelinesClientCreateOrUpdateResponse struct { + // Pipeline resource type. PipelineResource } // PipelinesClientCreateRunResponse contains the response from method PipelinesClient.CreateRun. type PipelinesClientCreateRunResponse struct { + // Response body with a run identifier. CreateRunResponse } @@ -386,21 +483,25 @@ type PipelinesClientDeleteResponse struct { // PipelinesClientGetResponse contains the response from method PipelinesClient.Get. type PipelinesClientGetResponse struct { + // Pipeline resource type. PipelineResource } // PipelinesClientListByFactoryResponse contains the response from method PipelinesClient.NewListByFactoryPager. type PipelinesClientListByFactoryResponse struct { + // A list of pipeline resources. PipelineListResponse } // PrivateEndPointConnectionsClientListByFactoryResponse contains the response from method PrivateEndPointConnectionsClient.NewListByFactoryPager. type PrivateEndPointConnectionsClientListByFactoryResponse struct { + // A list of linked service resources. PrivateEndpointConnectionListResponse } // PrivateEndpointConnectionClientCreateOrUpdateResponse contains the response from method PrivateEndpointConnectionClient.CreateOrUpdate. type PrivateEndpointConnectionClientCreateOrUpdateResponse struct { + // Private Endpoint Connection ARM resource. PrivateEndpointConnectionResource } @@ -411,11 +512,13 @@ type PrivateEndpointConnectionClientDeleteResponse struct { // PrivateEndpointConnectionClientGetResponse contains the response from method PrivateEndpointConnectionClient.Get. type PrivateEndpointConnectionClientGetResponse struct { + // Private Endpoint Connection ARM resource. PrivateEndpointConnectionResource } // PrivateLinkResourcesClientGetResponse contains the response from method PrivateLinkResourcesClient.Get. type PrivateLinkResourcesClientGetResponse struct { + // Wrapper for a collection of private link resources PrivateLinkResourcesWrapper } @@ -426,6 +529,7 @@ type TriggerRunsClientCancelResponse struct { // TriggerRunsClientQueryByFactoryResponse contains the response from method TriggerRunsClient.QueryByFactory. type TriggerRunsClientQueryByFactoryResponse struct { + // A list of trigger runs. TriggerRunsQueryResponse } @@ -436,6 +540,7 @@ type TriggerRunsClientRerunResponse struct { // TriggersClientCreateOrUpdateResponse contains the response from method TriggersClient.CreateOrUpdate. type TriggersClientCreateOrUpdateResponse struct { + // Trigger resource type. TriggerResource } @@ -446,21 +551,25 @@ type TriggersClientDeleteResponse struct { // TriggersClientGetEventSubscriptionStatusResponse contains the response from method TriggersClient.GetEventSubscriptionStatus. 
type TriggersClientGetEventSubscriptionStatusResponse struct { + // Defines the response of a trigger subscription operation. TriggerSubscriptionOperationStatus } // TriggersClientGetResponse contains the response from method TriggersClient.Get. type TriggersClientGetResponse struct { + // Trigger resource type. TriggerResource } // TriggersClientListByFactoryResponse contains the response from method TriggersClient.NewListByFactoryPager. type TriggersClientListByFactoryResponse struct { + // A list of trigger resources. TriggerListResponse } // TriggersClientQueryByFactoryResponse contains the response from method TriggersClient.QueryByFactory. type TriggersClientQueryByFactoryResponse struct { + // A query of triggers. TriggerQueryResponse } @@ -476,10 +585,12 @@ type TriggersClientStopResponse struct { // TriggersClientSubscribeToEventsResponse contains the response from method TriggersClient.BeginSubscribeToEvents. type TriggersClientSubscribeToEventsResponse struct { + // Defines the response of a trigger subscription operation. TriggerSubscriptionOperationStatus } // TriggersClientUnsubscribeFromEventsResponse contains the response from method TriggersClient.BeginUnsubscribeFromEvents. type TriggersClientUnsubscribeFromEventsResponse struct { + // Defines the response of a trigger subscription operation. TriggerSubscriptionOperationStatus } diff --git a/sdk/resourcemanager/datafactory/armdatafactory/time_rfc3339.go b/sdk/resourcemanager/datafactory/armdatafactory/time_rfc3339.go index 2019e5792472..16833db863d2 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/time_rfc3339.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/time_rfc3339.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory diff --git a/sdk/resourcemanager/datafactory/armdatafactory/triggerruns_client.go b/sdk/resourcemanager/datafactory/armdatafactory/triggerruns_client.go index cd9a30bda4bc..b4c2a0e63aeb 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/triggerruns_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/triggerruns_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -54,16 +53,18 @@ func NewTriggerRunsClient(subscriptionID string, credential azcore.TokenCredenti // - runID - The pipeline run identifier. // - options - TriggerRunsClientCancelOptions contains the optional parameters for the TriggerRunsClient.Cancel method. 
func (client *TriggerRunsClient) Cancel(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, runID string, options *TriggerRunsClientCancelOptions) (TriggerRunsClientCancelResponse, error) { + var err error req, err := client.cancelCreateRequest(ctx, resourceGroupName, factoryName, triggerName, runID, options) if err != nil { return TriggerRunsClientCancelResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return TriggerRunsClientCancelResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return TriggerRunsClientCancelResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return TriggerRunsClientCancelResponse{}, err } return TriggerRunsClientCancelResponse{}, nil } @@ -112,18 +113,21 @@ func (client *TriggerRunsClient) cancelCreateRequest(ctx context.Context, resour // - options - TriggerRunsClientQueryByFactoryOptions contains the optional parameters for the TriggerRunsClient.QueryByFactory // method. func (client *TriggerRunsClient) QueryByFactory(ctx context.Context, resourceGroupName string, factoryName string, filterParameters RunFilterParameters, options *TriggerRunsClientQueryByFactoryOptions) (TriggerRunsClientQueryByFactoryResponse, error) { + var err error req, err := client.queryByFactoryCreateRequest(ctx, resourceGroupName, factoryName, filterParameters, options) if err != nil { return TriggerRunsClientQueryByFactoryResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return TriggerRunsClientQueryByFactoryResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return TriggerRunsClientQueryByFactoryResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return TriggerRunsClientQueryByFactoryResponse{}, err } - return client.queryByFactoryHandleResponse(resp) + resp, err := client.queryByFactoryHandleResponse(httpResp) + return resp, err } // queryByFactoryCreateRequest creates the QueryByFactory request. @@ -149,7 +153,10 @@ func (client *TriggerRunsClient) queryByFactoryCreateRequest(ctx context.Context reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, filterParameters) + if err := runtime.MarshalAsJSON(req, filterParameters); err != nil { + return nil, err + } + return req, nil } // queryByFactoryHandleResponse handles the QueryByFactory response. @@ -171,16 +178,18 @@ func (client *TriggerRunsClient) queryByFactoryHandleResponse(resp *http.Respons // - runID - The pipeline run identifier. // - options - TriggerRunsClientRerunOptions contains the optional parameters for the TriggerRunsClient.Rerun method. 
func (client *TriggerRunsClient) Rerun(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, runID string, options *TriggerRunsClientRerunOptions) (TriggerRunsClientRerunResponse, error) { + var err error req, err := client.rerunCreateRequest(ctx, resourceGroupName, factoryName, triggerName, runID, options) if err != nil { return TriggerRunsClientRerunResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return TriggerRunsClientRerunResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return TriggerRunsClientRerunResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return TriggerRunsClientRerunResponse{}, err } return TriggerRunsClientRerunResponse{}, nil } diff --git a/sdk/resourcemanager/datafactory/armdatafactory/triggerruns_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/triggerruns_client_example_test.go deleted file mode 100644 index b563619751d3..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/triggerruns_client_example_test.go +++ /dev/null @@ -1,103 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "time" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Rerun.json -func ExampleTriggerRunsClient_Rerun() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewTriggerRunsClient().Rerun(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_Cancel.json -func ExampleTriggerRunsClient_Cancel() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewTriggerRunsClient().Cancel(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b", nil) - if err != nil { - log.Fatalf("failed to finish the request: 
%v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/TriggerRuns_QueryByFactory.json -func ExampleTriggerRunsClient_QueryByFactory() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewTriggerRunsClient().QueryByFactory(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.RunFilterParameters{ - Filters: []*armdatafactory.RunQueryFilter{ - { - Operand: to.Ptr(armdatafactory.RunQueryFilterOperandTriggerName), - Operator: to.Ptr(armdatafactory.RunQueryFilterOperatorEquals), - Values: []*string{ - to.Ptr("exampleTrigger")}, - }}, - LastUpdatedAfter: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:36:44.3345758Z"); return t }()), - LastUpdatedBefore: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:49:48.3686473Z"); return t }()), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.TriggerRunsQueryResponse = armdatafactory.TriggerRunsQueryResponse{ - // Value: []*armdatafactory.TriggerRun{ - // { - // Message: to.Ptr(""), - // Properties: map[string]*string{ - // "ScheduleTime": to.Ptr("6/16/2018 12:43:14 AM"), - // "TriggerTime": to.Ptr("6/16/2018 12:43:15 AM"), - // }, - // Status: to.Ptr(armdatafactory.TriggerRunStatusSucceeded), - // TriggerName: to.Ptr("exampleTrigger"), - // TriggerRunID: to.Ptr("08586724970898148904457116912CU27"), - // TriggerRunTimestamp: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:43:15.660141Z"); return t}()), - // TriggerType: to.Ptr("ScheduleTrigger"), - // TriggeredPipelines: map[string]*string{ - // "examplePipeline": to.Ptr("9f3ce8b3-37d7-43eb-96ac-a656c0476283"), - // }, - // }}, - // } -} diff --git a/sdk/resourcemanager/datafactory/armdatafactory/triggers_client.go b/sdk/resourcemanager/datafactory/armdatafactory/triggers_client.go index 0a48401a9643..850f843542f8 100644 --- a/sdk/resourcemanager/datafactory/armdatafactory/triggers_client.go +++ b/sdk/resourcemanager/datafactory/armdatafactory/triggers_client.go @@ -3,9 +3,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. +// Code generated by Microsoft (R) AutoRest Code Generator. DO NOT EDIT. // Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. package armdatafactory @@ -54,18 +53,21 @@ func NewTriggersClient(subscriptionID string, credential azcore.TokenCredential, // - trigger - Trigger resource definition. // - options - TriggersClientCreateOrUpdateOptions contains the optional parameters for the TriggersClient.CreateOrUpdate method. 
func (client *TriggersClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, trigger TriggerResource, options *TriggersClientCreateOrUpdateOptions) (TriggersClientCreateOrUpdateResponse, error) { + var err error req, err := client.createOrUpdateCreateRequest(ctx, resourceGroupName, factoryName, triggerName, trigger, options) if err != nil { return TriggersClientCreateOrUpdateResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return TriggersClientCreateOrUpdateResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return TriggersClientCreateOrUpdateResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return TriggersClientCreateOrUpdateResponse{}, err } - return client.createOrUpdateHandleResponse(resp) + resp, err := client.createOrUpdateHandleResponse(httpResp) + return resp, err } // createOrUpdateCreateRequest creates the CreateOrUpdate request. @@ -98,7 +100,10 @@ func (client *TriggersClient) createOrUpdateCreateRequest(ctx context.Context, r req.Raw().Header["If-Match"] = []string{*options.IfMatch} } req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, trigger) + if err := runtime.MarshalAsJSON(req, trigger); err != nil { + return nil, err + } + return req, nil } // createOrUpdateHandleResponse handles the CreateOrUpdate response. @@ -119,16 +124,18 @@ func (client *TriggersClient) createOrUpdateHandleResponse(resp *http.Response) // - triggerName - The trigger name. // - options - TriggersClientDeleteOptions contains the optional parameters for the TriggersClient.Delete method. func (client *TriggersClient) Delete(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, options *TriggersClientDeleteOptions) (TriggersClientDeleteResponse, error) { + var err error req, err := client.deleteCreateRequest(ctx, resourceGroupName, factoryName, triggerName, options) if err != nil { return TriggersClientDeleteResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return TriggersClientDeleteResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNoContent) { - return TriggersClientDeleteResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNoContent) { + err = runtime.NewResponseError(httpResp) + return TriggersClientDeleteResponse{}, err } return TriggersClientDeleteResponse{}, nil } @@ -172,18 +179,21 @@ func (client *TriggersClient) deleteCreateRequest(ctx context.Context, resourceG // - triggerName - The trigger name. // - options - TriggersClientGetOptions contains the optional parameters for the TriggersClient.Get method. 
func (client *TriggersClient) Get(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, options *TriggersClientGetOptions) (TriggersClientGetResponse, error) { + var err error req, err := client.getCreateRequest(ctx, resourceGroupName, factoryName, triggerName, options) if err != nil { return TriggersClientGetResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return TriggersClientGetResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusNotModified) { - return TriggersClientGetResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusNotModified) { + err = runtime.NewResponseError(httpResp) + return TriggersClientGetResponse{}, err } - return client.getHandleResponse(resp) + resp, err := client.getHandleResponse(httpResp) + return resp, err } // getCreateRequest creates the Get request. @@ -238,18 +248,21 @@ func (client *TriggersClient) getHandleResponse(resp *http.Response) (TriggersCl // - options - TriggersClientGetEventSubscriptionStatusOptions contains the optional parameters for the TriggersClient.GetEventSubscriptionStatus // method. func (client *TriggersClient) GetEventSubscriptionStatus(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, options *TriggersClientGetEventSubscriptionStatusOptions) (TriggersClientGetEventSubscriptionStatusResponse, error) { + var err error req, err := client.getEventSubscriptionStatusCreateRequest(ctx, resourceGroupName, factoryName, triggerName, options) if err != nil { return TriggersClientGetEventSubscriptionStatusResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return TriggersClientGetEventSubscriptionStatusResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return TriggersClientGetEventSubscriptionStatusResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return TriggersClientGetEventSubscriptionStatusResponse{}, err } - return client.getEventSubscriptionStatusHandleResponse(resp) + resp, err := client.getEventSubscriptionStatusHandleResponse(httpResp) + return resp, err } // getEventSubscriptionStatusCreateRequest creates the GetEventSubscriptionStatus request. @@ -370,18 +383,21 @@ func (client *TriggersClient) listByFactoryHandleResponse(resp *http.Response) ( // - filterParameters - Parameters to filter the triggers. // - options - TriggersClientQueryByFactoryOptions contains the optional parameters for the TriggersClient.QueryByFactory method. 
func (client *TriggersClient) QueryByFactory(ctx context.Context, resourceGroupName string, factoryName string, filterParameters TriggerFilterParameters, options *TriggersClientQueryByFactoryOptions) (TriggersClientQueryByFactoryResponse, error) { + var err error req, err := client.queryByFactoryCreateRequest(ctx, resourceGroupName, factoryName, filterParameters, options) if err != nil { return TriggersClientQueryByFactoryResponse{}, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return TriggersClientQueryByFactoryResponse{}, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return TriggersClientQueryByFactoryResponse{}, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return TriggersClientQueryByFactoryResponse{}, err } - return client.queryByFactoryHandleResponse(resp) + resp, err := client.queryByFactoryHandleResponse(httpResp) + return resp, err } // queryByFactoryCreateRequest creates the QueryByFactory request. @@ -407,7 +423,10 @@ func (client *TriggersClient) queryByFactoryCreateRequest(ctx context.Context, r reqQP.Set("api-version", "2018-06-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} - return req, runtime.MarshalAsJSON(req, filterParameters) + if err := runtime.MarshalAsJSON(req, filterParameters); err != nil { + return nil, err + } + return req, nil } // queryByFactoryHandleResponse handles the QueryByFactory response. @@ -433,7 +452,8 @@ func (client *TriggersClient) BeginStart(ctx context.Context, resourceGroupName if err != nil { return nil, err } - return runtime.NewPoller[TriggersClientStartResponse](resp, client.internal.Pipeline(), nil) + poller, err := runtime.NewPoller[TriggersClientStartResponse](resp, client.internal.Pipeline(), nil) + return poller, err } else { return runtime.NewPollerFromResumeToken[TriggersClientStartResponse](options.ResumeToken, client.internal.Pipeline(), nil) } @@ -444,18 +464,20 @@ func (client *TriggersClient) BeginStart(ctx context.Context, resourceGroupName // // Generated from API version 2018-06-01 func (client *TriggersClient) start(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, options *TriggersClientBeginStartOptions) (*http.Response, error) { + var err error req, err := client.startCreateRequest(ctx, resourceGroupName, factoryName, triggerName, options) if err != nil { return nil, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return nil, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return nil, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return nil, err } - return resp, nil + return httpResp, nil } // startCreateRequest creates the Start request. 
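For callers, the regenerated long-running operations are still driven through the azcore poller returned by the Begin* methods. The sketch below is modeled on the example tests this patch removes: it starts a trigger and waits for completion. The subscription ID and resource names are placeholders, not values taken from this patch.

package main

import (
	"context"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3"
)

func main() {
	cred, err := azidentity.NewDefaultAzureCredential(nil)
	if err != nil {
		log.Fatalf("failed to obtain a credential: %v", err)
	}
	ctx := context.Background()
	clientFactory, err := armdatafactory.NewClientFactory("<subscription ID>", cred, nil)
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}
	// BeginStart kicks off the long-running start operation and returns a poller.
	poller, err := clientFactory.NewTriggersClient().BeginStart(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", nil)
	if err != nil {
		log.Fatalf("failed to start the operation: %v", err)
	}
	// PollUntilDone blocks until the trigger has started or the operation fails.
	if _, err := poller.PollUntilDone(ctx, nil); err != nil {
		log.Fatalf("failed to wait for completion: %v", err)
	}
}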
@@ -502,7 +524,8 @@ func (client *TriggersClient) BeginStop(ctx context.Context, resourceGroupName s if err != nil { return nil, err } - return runtime.NewPoller[TriggersClientStopResponse](resp, client.internal.Pipeline(), nil) + poller, err := runtime.NewPoller[TriggersClientStopResponse](resp, client.internal.Pipeline(), nil) + return poller, err } else { return runtime.NewPollerFromResumeToken[TriggersClientStopResponse](options.ResumeToken, client.internal.Pipeline(), nil) } @@ -513,18 +536,20 @@ func (client *TriggersClient) BeginStop(ctx context.Context, resourceGroupName s // // Generated from API version 2018-06-01 func (client *TriggersClient) stop(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, options *TriggersClientBeginStopOptions) (*http.Response, error) { + var err error req, err := client.stopCreateRequest(ctx, resourceGroupName, factoryName, triggerName, options) if err != nil { return nil, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return nil, err } - if !runtime.HasStatusCode(resp, http.StatusOK) { - return nil, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK) { + err = runtime.NewResponseError(httpResp) + return nil, err } - return resp, nil + return httpResp, nil } // stopCreateRequest creates the Stop request. @@ -572,7 +597,8 @@ func (client *TriggersClient) BeginSubscribeToEvents(ctx context.Context, resour if err != nil { return nil, err } - return runtime.NewPoller[TriggersClientSubscribeToEventsResponse](resp, client.internal.Pipeline(), nil) + poller, err := runtime.NewPoller[TriggersClientSubscribeToEventsResponse](resp, client.internal.Pipeline(), nil) + return poller, err } else { return runtime.NewPollerFromResumeToken[TriggersClientSubscribeToEventsResponse](options.ResumeToken, client.internal.Pipeline(), nil) } @@ -583,18 +609,20 @@ func (client *TriggersClient) BeginSubscribeToEvents(ctx context.Context, resour // // Generated from API version 2018-06-01 func (client *TriggersClient) subscribeToEvents(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, options *TriggersClientBeginSubscribeToEventsOptions) (*http.Response, error) { + var err error req, err := client.subscribeToEventsCreateRequest(ctx, resourceGroupName, factoryName, triggerName, options) if err != nil { return nil, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return nil, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusAccepted) { - return nil, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusAccepted) { + err = runtime.NewResponseError(httpResp) + return nil, err } - return resp, nil + return httpResp, nil } // subscribeToEventsCreateRequest creates the SubscribeToEvents request. 
@@ -642,7 +670,8 @@ func (client *TriggersClient) BeginUnsubscribeFromEvents(ctx context.Context, re if err != nil { return nil, err } - return runtime.NewPoller[TriggersClientUnsubscribeFromEventsResponse](resp, client.internal.Pipeline(), nil) + poller, err := runtime.NewPoller[TriggersClientUnsubscribeFromEventsResponse](resp, client.internal.Pipeline(), nil) + return poller, err } else { return runtime.NewPollerFromResumeToken[TriggersClientUnsubscribeFromEventsResponse](options.ResumeToken, client.internal.Pipeline(), nil) } @@ -653,18 +682,20 @@ func (client *TriggersClient) BeginUnsubscribeFromEvents(ctx context.Context, re // // Generated from API version 2018-06-01 func (client *TriggersClient) unsubscribeFromEvents(ctx context.Context, resourceGroupName string, factoryName string, triggerName string, options *TriggersClientBeginUnsubscribeFromEventsOptions) (*http.Response, error) { + var err error req, err := client.unsubscribeFromEventsCreateRequest(ctx, resourceGroupName, factoryName, triggerName, options) if err != nil { return nil, err } - resp, err := client.internal.Pipeline().Do(req) + httpResp, err := client.internal.Pipeline().Do(req) if err != nil { return nil, err } - if !runtime.HasStatusCode(resp, http.StatusOK, http.StatusAccepted) { - return nil, runtime.NewResponseError(resp) + if !runtime.HasStatusCode(httpResp, http.StatusOK, http.StatusAccepted) { + err = runtime.NewResponseError(httpResp) + return nil, err } - return resp, nil + return httpResp, nil } // unsubscribeFromEventsCreateRequest creates the UnsubscribeFromEvents request. diff --git a/sdk/resourcemanager/datafactory/armdatafactory/triggers_client_example_test.go b/sdk/resourcemanager/datafactory/armdatafactory/triggers_client_example_test.go deleted file mode 100644 index d4bcade60d49..000000000000 --- a/sdk/resourcemanager/datafactory/armdatafactory/triggers_client_example_test.go +++ /dev/null @@ -1,465 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. See License.txt in the project root for license information. -// Code generated by Microsoft (R) AutoRest Code Generator. -// Changes may cause incorrect behavior and will be lost if the code is regenerated. -// DO NOT EDIT. - -package armdatafactory_test - -import ( - "context" - "log" - - "time" - - "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3" -) - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_ListByFactory.json -func ExampleTriggersClient_NewListByFactoryPager() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - pager := clientFactory.NewTriggersClient().NewListByFactoryPager("exampleResourceGroup", "exampleFactoryName", nil) - for pager.More() { - page, err := pager.NextPage(ctx) - if err != nil { - log.Fatalf("failed to advance page: %v", err) - } - for _, v := range page.Value { - // You could use page here. We use blank identifier for just demo purposes. 
- _ = v - } - // If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // page.TriggerListResponse = armdatafactory.TriggerListResponse{ - // Value: []*armdatafactory.TriggerResource{ - // { - // Name: to.Ptr("exampleTrigger"), - // Type: to.Ptr("Microsoft.DataFactory/factories/triggers"), - // Etag: to.Ptr("0a008ed4-0000-0000-0000-5b245c740000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger"), - // Properties: &armdatafactory.ScheduleTrigger{ - // Type: to.Ptr("ScheduleTrigger"), - // Description: to.Ptr("Example description"), - // RuntimeState: to.Ptr(armdatafactory.TriggerRuntimeStateStarted), - // Pipelines: []*armdatafactory.TriggerPipelineReference{ - // { - // Parameters: map[string]any{ - // "OutputBlobNameList": []any{ - // "exampleoutput.csv", - // }, - // }, - // PipelineReference: &armdatafactory.PipelineReference{ - // Type: to.Ptr(armdatafactory.PipelineReferenceTypePipelineReference), - // ReferenceName: to.Ptr("examplePipeline"), - // }, - // }}, - // TypeProperties: &armdatafactory.ScheduleTriggerTypeProperties{ - // Recurrence: &armdatafactory.ScheduleTriggerRecurrence{ - // EndTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:55:14.905167Z"); return t}()), - // Frequency: to.Ptr(armdatafactory.RecurrenceFrequencyMinute), - // Interval: to.Ptr[int32](4), - // StartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:39:14.905167Z"); return t}()), - // TimeZone: to.Ptr("UTC"), - // }, - // }, - // }, - // }}, - // } - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_QueryByFactory.json -func ExampleTriggersClient_QueryByFactory() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewTriggersClient().QueryByFactory(ctx, "exampleResourceGroup", "exampleFactoryName", armdatafactory.TriggerFilterParameters{ - ParentTriggerName: to.Ptr("exampleTrigger"), - }, nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.TriggerQueryResponse = armdatafactory.TriggerQueryResponse{ - // Value: []*armdatafactory.TriggerResource{ - // { - // Name: to.Ptr("exampleRerunTrigger"), - // Type: to.Ptr("Microsoft.DataFactory/factories/triggers"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleRerunTrigger"), - // Properties: &armdatafactory.RerunTumblingWindowTrigger{ - // Type: to.Ptr("RerunTumblingWindowTrigger"), - // Description: to.Ptr("Example description"), - // TypeProperties: &armdatafactory.RerunTumblingWindowTriggerTypeProperties{ - // ParentTrigger: "exampleTrigger", - // RequestedEndTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:55:14.905167Z"); return t}()), - // RequestedStartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:39:14.905167Z"); return t}()), - // RerunConcurrency: to.Ptr[int32](4), - // }, - // }, - // }}, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Create.json -func ExampleTriggersClient_CreateOrUpdate_triggersCreate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewTriggersClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", armdatafactory.TriggerResource{ - Properties: &armdatafactory.ScheduleTrigger{ - Type: to.Ptr("ScheduleTrigger"), - Pipelines: []*armdatafactory.TriggerPipelineReference{ - { - Parameters: map[string]any{ - "OutputBlobNameList": []any{ - "exampleoutput.csv", - }, - }, - PipelineReference: &armdatafactory.PipelineReference{ - Type: to.Ptr(armdatafactory.PipelineReferenceTypePipelineReference), - ReferenceName: to.Ptr("examplePipeline"), - }, - }}, - TypeProperties: &armdatafactory.ScheduleTriggerTypeProperties{ - Recurrence: &armdatafactory.ScheduleTriggerRecurrence{ - EndTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:55:13.8441801Z"); return t }()), - Frequency: to.Ptr(armdatafactory.RecurrenceFrequencyMinute), - Interval: to.Ptr[int32](4), - StartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:39:13.8441801Z"); return t }()), - TimeZone: to.Ptr("UTC"), - }, - }, - }, - }, &armdatafactory.TriggersClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.TriggerResource = armdatafactory.TriggerResource{ - // Name: to.Ptr("exampleTrigger"), - // Type: to.Ptr("Microsoft.DataFactory/factories/triggers"), - // Etag: to.Ptr("0a008ad4-0000-0000-0000-5b245c6e0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger"), - // Properties: &armdatafactory.ScheduleTrigger{ - // Type: to.Ptr("ScheduleTrigger"), - // RuntimeState: to.Ptr(armdatafactory.TriggerRuntimeStateStopped), - // Pipelines: []*armdatafactory.TriggerPipelineReference{ - // { - // Parameters: map[string]any{ - // "OutputBlobNameList": []any{ - // "exampleoutput.csv", - // }, - // }, - // PipelineReference: &armdatafactory.PipelineReference{ - // Type: to.Ptr(armdatafactory.PipelineReferenceTypePipelineReference), - // ReferenceName: to.Ptr("examplePipeline"), - // }, - // }}, - // TypeProperties: &armdatafactory.ScheduleTriggerTypeProperties{ - // Recurrence: &armdatafactory.ScheduleTriggerRecurrence{ - // EndTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:55:13.8441801Z"); return t}()), - // Frequency: to.Ptr(armdatafactory.RecurrenceFrequencyMinute), - // Interval: to.Ptr[int32](4), - // StartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:39:13.8441801Z"); return t}()), - // TimeZone: to.Ptr("UTC"), - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Update.json -func ExampleTriggersClient_CreateOrUpdate_triggersUpdate() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewTriggersClient().CreateOrUpdate(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", armdatafactory.TriggerResource{ - Properties: &armdatafactory.ScheduleTrigger{ - Type: to.Ptr("ScheduleTrigger"), - Description: to.Ptr("Example description"), - Pipelines: []*armdatafactory.TriggerPipelineReference{ - { - Parameters: map[string]any{ - "OutputBlobNameList": []any{ - "exampleoutput.csv", - }, - }, - PipelineReference: &armdatafactory.PipelineReference{ - Type: to.Ptr(armdatafactory.PipelineReferenceTypePipelineReference), - ReferenceName: to.Ptr("examplePipeline"), - }, - }}, - TypeProperties: &armdatafactory.ScheduleTriggerTypeProperties{ - Recurrence: &armdatafactory.ScheduleTriggerRecurrence{ - EndTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:55:14.905167Z"); return t }()), - Frequency: to.Ptr(armdatafactory.RecurrenceFrequencyMinute), - Interval: to.Ptr[int32](4), - StartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:39:14.905167Z"); return t }()), - TimeZone: to.Ptr("UTC"), - }, - }, - }, - }, &armdatafactory.TriggersClientCreateOrUpdateOptions{IfMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. 
- _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.TriggerResource = armdatafactory.TriggerResource{ - // Name: to.Ptr("exampleTrigger"), - // Type: to.Ptr("Microsoft.DataFactory/factories/triggers"), - // Etag: to.Ptr("0a008dd4-0000-0000-0000-5b245c6f0000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger"), - // Properties: &armdatafactory.ScheduleTrigger{ - // Type: to.Ptr("ScheduleTrigger"), - // Description: to.Ptr("Example description"), - // RuntimeState: to.Ptr(armdatafactory.TriggerRuntimeStateStopped), - // Pipelines: []*armdatafactory.TriggerPipelineReference{ - // { - // Parameters: map[string]any{ - // "OutputBlobNameList": []any{ - // "exampleoutput.csv", - // }, - // }, - // PipelineReference: &armdatafactory.PipelineReference{ - // Type: to.Ptr(armdatafactory.PipelineReferenceTypePipelineReference), - // ReferenceName: to.Ptr("examplePipeline"), - // }, - // }}, - // TypeProperties: &armdatafactory.ScheduleTriggerTypeProperties{ - // Recurrence: &armdatafactory.ScheduleTriggerRecurrence{ - // EndTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:55:14.905167Z"); return t}()), - // Frequency: to.Ptr(armdatafactory.RecurrenceFrequencyMinute), - // Interval: to.Ptr[int32](4), - // StartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2018-06-16T00:39:14.905167Z"); return t}()), - // TimeZone: to.Ptr("UTC"), - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Get.json -func ExampleTriggersClient_Get() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewTriggersClient().Get(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", &armdatafactory.TriggersClientGetOptions{IfNoneMatch: nil}) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.TriggerResource = armdatafactory.TriggerResource{ - // Name: to.Ptr("exampleTrigger"), - // Type: to.Ptr("Microsoft.DataFactory/factories/triggers"), - // Etag: to.Ptr("1500544f-0000-0200-0000-5cbe09100000"), - // ID: to.Ptr("/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger"), - // Properties: &armdatafactory.ScheduleTrigger{ - // Type: to.Ptr("ScheduleTrigger"), - // RuntimeState: to.Ptr(armdatafactory.TriggerRuntimeStateStopped), - // Pipelines: []*armdatafactory.TriggerPipelineReference{ - // { - // Parameters: map[string]any{ - // "OutputBlobNameList": []any{ - // "exampleoutput.csv", - // }, - // }, - // PipelineReference: &armdatafactory.PipelineReference{ - // Type: to.Ptr(armdatafactory.PipelineReferenceTypePipelineReference), - // ReferenceName: to.Ptr("examplePipeline"), - // }, - // }}, - // TypeProperties: &armdatafactory.ScheduleTriggerTypeProperties{ - // Recurrence: &armdatafactory.ScheduleTriggerRecurrence{ - // EndTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2019-04-22T18:48:52.5281747Z"); return t}()), - // Frequency: to.Ptr(armdatafactory.RecurrenceFrequencyMinute), - // Interval: to.Ptr[int32](4), - // StartTime: to.Ptr(func() time.Time { t, _ := time.Parse(time.RFC3339Nano, "2019-04-22T18:32:52.527912Z"); return t}()), - // TimeZone: to.Ptr("UTC"), - // }, - // }, - // }, - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Delete.json -func ExampleTriggersClient_Delete() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - _, err = clientFactory.NewTriggersClient().Delete(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_SubscribeToEvents.json -func ExampleTriggersClient_BeginSubscribeToEvents() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - poller, err := clientFactory.NewTriggersClient().BeginSubscribeToEvents(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - res, err := poller.PollUntilDone(ctx, nil) - if err != nil { - log.Fatalf("failed to pull the result: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.TriggerSubscriptionOperationStatus = armdatafactory.TriggerSubscriptionOperationStatus{ - // Status: to.Ptr(armdatafactory.EventSubscriptionStatusEnabled), - // TriggerName: to.Ptr("exampleTrigger"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_GetEventSubscriptionStatus.json -func ExampleTriggersClient_GetEventSubscriptionStatus() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - res, err := clientFactory.NewTriggersClient().GetEventSubscriptionStatus(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. - // res.TriggerSubscriptionOperationStatus = armdatafactory.TriggerSubscriptionOperationStatus{ - // Status: to.Ptr(armdatafactory.EventSubscriptionStatusEnabled), - // TriggerName: to.Ptr("exampleTrigger"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_UnsubscribeFromEvents.json -func ExampleTriggersClient_BeginUnsubscribeFromEvents() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - poller, err := clientFactory.NewTriggersClient().BeginUnsubscribeFromEvents(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - res, err := poller.PollUntilDone(ctx, nil) - if err != nil { - log.Fatalf("failed to pull the result: %v", err) - } - // You could use response here. We use blank identifier for just demo purposes. - _ = res - // If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes. 
- // res.TriggerSubscriptionOperationStatus = armdatafactory.TriggerSubscriptionOperationStatus{ - // Status: to.Ptr(armdatafactory.EventSubscriptionStatusDisabled), - // TriggerName: to.Ptr("exampleTrigger"), - // } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Start.json -func ExampleTriggersClient_BeginStart() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - poller, err := clientFactory.NewTriggersClient().BeginStart(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - _, err = poller.PollUntilDone(ctx, nil) - if err != nil { - log.Fatalf("failed to pull the result: %v", err) - } -} - -// Generated from example definition: https://github.com/Azure/azure-rest-api-specs/blob/5c9459305484e0456b4a922e3d31a61e2ddd3c99/specification/datafactory/resource-manager/Microsoft.DataFactory/stable/2018-06-01/examples/Triggers_Stop.json -func ExampleTriggersClient_BeginStop() { - cred, err := azidentity.NewDefaultAzureCredential(nil) - if err != nil { - log.Fatalf("failed to obtain a credential: %v", err) - } - ctx := context.Background() - clientFactory, err := armdatafactory.NewClientFactory("", cred, nil) - if err != nil { - log.Fatalf("failed to create client: %v", err) - } - poller, err := clientFactory.NewTriggersClient().BeginStop(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", nil) - if err != nil { - log.Fatalf("failed to finish the request: %v", err) - } - _, err = poller.PollUntilDone(ctx, nil) - if err != nil { - log.Fatalf("failed to pull the result: %v", err) - } -}
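
For reference, a minimal usage sketch mirroring the removed ExampleTriggersClient_BeginStart test; the subscription ID, resource group, factory, and trigger names are placeholders taken from the deleted examples, and the module path assumes armdatafactory/v3 as imported there. This is illustrative only and is not part of the generated code.

    package main

    import (
        "context"
        "log"

        "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
        "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/datafactory/armdatafactory/v3"
    )

    func main() {
        // Obtain a credential and build the client factory; the subscription ID is a placeholder.
        cred, err := azidentity.NewDefaultAzureCredential(nil)
        if err != nil {
            log.Fatalf("failed to obtain a credential: %v", err)
        }
        clientFactory, err := armdatafactory.NewClientFactory("<subscription-id>", cred, nil)
        if err != nil {
            log.Fatalf("failed to create client: %v", err)
        }
        ctx := context.Background()
        // BeginStart returns a poller for the long-running Start operation;
        // PollUntilDone blocks until the trigger has been started or the operation fails.
        poller, err := clientFactory.NewTriggersClient().BeginStart(ctx, "exampleResourceGroup", "exampleFactoryName", "exampleTrigger", nil)
        if err != nil {
            log.Fatalf("failed to finish the request: %v", err)
        }
        if _, err = poller.PollUntilDone(ctx, nil); err != nil {
            log.Fatalf("failed to poll the result: %v", err)
        }
    }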