diff --git a/.teamcity/components/settings.kt b/.teamcity/components/settings.kt index 805760020db0..b7f2bc497813 100644 --- a/.teamcity/components/settings.kt +++ b/.teamcity/components/settings.kt @@ -63,6 +63,15 @@ var serviceTestConfigurationOverrides = mapOf( // Log Analytics Clusters have a max deployments of 2 - parallelism set to 1 or `importTest` fails "loganalytics" to testConfiguration(parallelism = 1), + // Logic uses app service which is only available in certain locations + "logic" to testConfiguration(locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false)), + + // MSSQL uses app service which is only available in certain locations + "mssql" to testConfiguration(locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false)), + + // MySQL has quota available in certain locations + "mysql" to testConfiguration(locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false)), + // netapp has a max of 20 accounts per subscription so lets limit it to 10 to account for broken ones, run Monday, Wednesday, Friday "netapp" to testConfiguration(parallelism = 10, daysOfWeek = "2,4,6"), @@ -82,9 +91,12 @@ var serviceTestConfigurationOverrides = mapOf( // Spring Cloud only allows a max of 10 provisioned "springcloud" to testConfiguration(parallelism = 5), + // Synapse is only available in certain locations + "synapse" to testConfiguration(locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false)), + // Currently, we have insufficient quota to actually run these, but there are a few nodes in West Europe, so we'll pin it there for now "vmware" to testConfiguration(parallelism = 3, locationOverride = LocationConfiguration("westeurope", "westus2", "eastus2", false)), // Offset start hour to avoid collision with new App Service, reduce frequency of testing days - "web" to testConfiguration(startHour = 3, daysOfWeek = "2,4,6") + "web" to 
testConfiguration(startHour = 3, daysOfWeek = "2,4,6", locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false)) ) diff --git a/internal/services/datafactory/data_factory.go b/internal/services/datafactory/data_factory.go index 00e1c29031d2..a6896932adbb 100644 --- a/internal/services/datafactory/data_factory.go +++ b/internal/services/datafactory/data_factory.go @@ -465,7 +465,7 @@ func flattenDataFactoryDatasetAzureBlobStorageLocation(input *datafactory.AzureB result := make(map[string]interface{}) if input.Container != nil { - container, dynamicContainerEnabled := flattenDataFactoryExpressionResultType(input.FolderPath) + container, dynamicContainerEnabled := flattenDataFactoryExpressionResultType(input.Container) result["container"] = container result["dynamic_container_enabled"] = dynamicContainerEnabled } diff --git a/internal/services/datafactory/data_factory_dataset_binary_resource.go b/internal/services/datafactory/data_factory_dataset_binary_resource.go index e4c21f2dabaf..f888d708ad0f 100644 --- a/internal/services/datafactory/data_factory_dataset_binary_resource.go +++ b/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -134,6 +134,11 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { Required: true, ValidateFunc: validation.StringIsNotEmpty, }, + "dynamic_container_enabled": { + Type: pluginsdk.TypeBool, + Optional: true, + Default: false, + }, "path": { Type: pluginsdk.TypeString, Optional: true, diff --git a/internal/services/datafactory/data_factory_dataset_binary_resource_test.go b/internal/services/datafactory/data_factory_dataset_binary_resource_test.go index b9e7d6c2e99c..6a0447505723 100644 --- a/internal/services/datafactory/data_factory_dataset_binary_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_binary_resource_test.go @@ -90,6 +90,35 @@ func TestAccDataFactoryDatasetBinary_sftpComplete(t *testing.T) { }) } +func 
TestAccDataFactoryDatasetBinary_blobDynamicContainer(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test") + r := DatasetBinaryResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.blobDynamicContainer(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + func (t DatasetBinaryResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := parse.DataSetID(state.ID) if err != nil { @@ -349,3 +378,54 @@ resource "azurerm_data_factory_dataset_binary" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) } + +func (DatasetBinaryResource) blobDynamicContainer(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestdf%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_storage_container" "test" { + name = "content" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + + +resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { 
+ name = "acctestlsblob%d" + data_factory_id = azurerm_data_factory.test.id + connection_string = azurerm_storage_account.test.primary_connection_string +} + +resource "azurerm_data_factory_dataset_binary" "test" { + name = "acctestds%d" + data_factory_id = azurerm_data_factory.test.id + linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name + + azure_blob_storage_location { + container = azurerm_storage_container.test.name + dynamic_container_enabled = true + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go b/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go index 3a7c926fc690..45efbc6ff643 100644 --- a/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go +++ b/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go @@ -106,6 +106,11 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource { Required: true, ValidateFunc: validation.StringIsNotEmpty, }, + "dynamic_container_enabled": { + Type: pluginsdk.TypeBool, + Optional: true, + Default: false, + }, "path": { Type: pluginsdk.TypeString, Optional: true, diff --git a/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go b/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go index 8f711591caa4..da8fad522933 100644 --- a/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go @@ -134,6 +134,35 @@ func TestAccDataFactoryDatasetDelimitedText_blobFS(t *testing.T) { }) } +func TestAccDataFactoryDatasetDelimitedText_blobDynamicContainer(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test") + r := 
DatasetDelimitedTextResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.blobDynamicContainer(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + func (t DatasetDelimitedTextResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := parse.DataSetID(state.ID) if err != nil { @@ -455,7 +484,6 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" { escape_character = "f" first_row_as_header = true null_value = "NULL" - } `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) } @@ -592,3 +620,66 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) } + +func (DatasetDelimitedTextResource) blobDynamicContainer(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestdf%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_storage_container" "test" { + name = "content" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_data_factory" "test" { + name = 
"acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + + +resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { + name = "acctestlsblob%d" + data_factory_id = azurerm_data_factory.test.id + connection_string = azurerm_storage_account.test.primary_connection_string +} + +resource "azurerm_data_factory_dataset_delimited_text" "test" { + name = "acctestds%d" + data_factory_id = azurerm_data_factory.test.id + linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name + + azure_blob_storage_location { + container = azurerm_storage_container.test.name + dynamic_container_enabled = true + path = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))" + dynamic_path_enabled = true + filename = "@concat('foo', '.txt')" + dynamic_filename_enabled = true + } + + column_delimiter = "," + row_delimiter = "NEW" + encoding = "UTF-8" + quote_character = "x" + escape_character = "f" + first_row_as_header = true + null_value = "NULL" +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/internal/services/datafactory/data_factory_dataset_json_resource.go b/internal/services/datafactory/data_factory_dataset_json_resource.go index e9924b204152..36b6bd446aa3 100644 --- a/internal/services/datafactory/data_factory_dataset_json_resource.go +++ b/internal/services/datafactory/data_factory_dataset_json_resource.go @@ -108,6 +108,11 @@ func resourceDataFactoryDatasetJSON() *pluginsdk.Resource { Required: true, ValidateFunc: validation.StringIsNotEmpty, }, + "dynamic_container_enabled": { + Type: pluginsdk.TypeBool, + Optional: true, + Default: false, + }, "path": { Type: pluginsdk.TypeString, Required: true, diff --git a/internal/services/datafactory/data_factory_dataset_json_resource_test.go 
b/internal/services/datafactory/data_factory_dataset_json_resource_test.go index 82c39ffffecd..0edd79dc2c05 100644 --- a/internal/services/datafactory/data_factory_dataset_json_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_json_resource_test.go @@ -77,6 +77,35 @@ func TestAccDataFactoryDatasetJSON_blob(t *testing.T) { }) } +func TestAccDataFactoryDatasetJSON_blobDynamicContainer(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_json", "test") + r := DatasetJSONResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.blobDynamicContainer(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + func (t DatasetJSONResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := parse.DataSetID(state.ID) if err != nil { @@ -315,3 +344,58 @@ resource "azurerm_data_factory_dataset_json" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) } + +func (DatasetJSONResource) blobDynamicContainer(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestdf%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_storage_container" "test" { + name = 
"content" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + + +resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { + name = "acctestlsblob%d" + data_factory_id = azurerm_data_factory.test.id + connection_string = azurerm_storage_account.test.primary_connection_string +} + +resource "azurerm_data_factory_dataset_json" "test" { + name = "acctestds%d" + data_factory_id = azurerm_data_factory.test.id + linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name + + azure_blob_storage_location { + container = azurerm_storage_container.test.name + dynamic_container_enabled = true + path = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))" + dynamic_path_enabled = true + filename = "foo.json" + dynamic_filename_enabled = false + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/internal/services/datafactory/data_factory_dataset_parquet_resource.go b/internal/services/datafactory/data_factory_dataset_parquet_resource.go index 329ffcbf60b2..edb1ff178f3a 100644 --- a/internal/services/datafactory/data_factory_dataset_parquet_resource.go +++ b/internal/services/datafactory/data_factory_dataset_parquet_resource.go @@ -85,6 +85,11 @@ func resourceDataFactoryDatasetParquet() *pluginsdk.Resource { Required: true, ValidateFunc: validation.StringIsNotEmpty, }, + "dynamic_container_enabled": { + Type: pluginsdk.TypeBool, + Optional: true, + Default: false, + }, "path": { Type: pluginsdk.TypeString, Required: true, diff --git a/internal/services/datafactory/data_factory_dataset_parquet_resource_test.go 
b/internal/services/datafactory/data_factory_dataset_parquet_resource_test.go index d086c54a48f4..7bef46c4d14c 100644 --- a/internal/services/datafactory/data_factory_dataset_parquet_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_parquet_resource_test.go @@ -79,6 +79,35 @@ func TestAccDataFactoryDatasetParquet_blob(t *testing.T) { }) } +func TestAccDataFactoryDatasetParquet_blobDynamicContainer(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_parquet", "test") + r := DatasetParquetResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.blobDynamicContainer(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + func (t DatasetParquetResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := parse.DataSetID(state.ID) if err != nil { @@ -312,3 +341,56 @@ resource "azurerm_data_factory_dataset_parquet" "test" { } `, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) } + +func (DatasetParquetResource) blobDynamicContainer(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_storage_account" "test" { + name = "acctestdf%s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource 
"azurerm_storage_container" "test" { + name = "content" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + + +resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { + name = "acctestlsblob%d" + data_factory_id = azurerm_data_factory.test.id + connection_string = azurerm_storage_account.test.primary_connection_string +} + +resource "azurerm_data_factory_dataset_parquet" "test" { + name = "acctestds%d" + data_factory_id = azurerm_data_factory.test.id + linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name + + azure_blob_storage_location { + container = azurerm_storage_container.test.name + dynamic_container_enabled = true + path = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))" + dynamic_path_enabled = true + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/website/docs/r/data_factory_dataset_binary.html.markdown b/website/docs/r/data_factory_dataset_binary.html.markdown index a6c141eac671..fdad3c27468f 100644 --- a/website/docs/r/data_factory_dataset_binary.html.markdown +++ b/website/docs/r/data_factory_dataset_binary.html.markdown @@ -110,6 +110,8 @@ A `azure_blob_storage_location` block supports the following: * `filename` - (Optional) The filename of the file in the blob container. +* `dynamic_container_enabled` - (Optional) Is the `container` using dynamic expression, function or system variables? Defaults to `false`. + * `dynamic_path_enabled` - (Optional) Is the `path` using dynamic expression, function or system variables? Defaults to `false`. 
* `dynamic_filename_enabled` - (Optional) Is the `filename` using dynamic expression, function or system variables? Defaults to `false`. diff --git a/website/docs/r/data_factory_dataset_delimited_text.html.markdown b/website/docs/r/data_factory_dataset_delimited_text.html.markdown index 01df903bb64b..ccc2b5afc01e 100644 --- a/website/docs/r/data_factory_dataset_delimited_text.html.markdown +++ b/website/docs/r/data_factory_dataset_delimited_text.html.markdown @@ -132,6 +132,8 @@ An `azure_blob_storage_location` block supports the following: * `filename` - (Optional) The filename of the file. +* `dynamic_container_enabled` - (Optional) Is the `container` using dynamic expression, function or system variables? Defaults to `false`. + * `dynamic_path_enabled` - (Optional) Is the `path` using dynamic expression, function or system variables? Defaults to `false`. * `dynamic_filename_enabled` - (Optional) Is the `filename` using dynamic expression, function or system variables? Defaults to `false`. diff --git a/website/docs/r/data_factory_dataset_json.html.markdown b/website/docs/r/data_factory_dataset_json.html.markdown index df961edfe334..46e261c1e889 100644 --- a/website/docs/r/data_factory_dataset_json.html.markdown +++ b/website/docs/r/data_factory_dataset_json.html.markdown @@ -113,6 +113,8 @@ A `azure_blob_storage_location` block supports the following: * `filename` - (Required) The filename of the file on the web server. +* `dynamic_container_enabled` - (Optional) Is the `container` using dynamic expression, function or system variables? Defaults to `false`. + * `dynamic_path_enabled` - (Optional) Is the `path` using dynamic expression, function or system variables? Defaults to `false`. * `dynamic_filename_enabled` - (Optional) Is the `filename` using dynamic expression, function or system variables? Defaults to `false`. 
diff --git a/website/docs/r/data_factory_dataset_parquet.html.markdown b/website/docs/r/data_factory_dataset_parquet.html.markdown index 289bf514a461..b30f3b910166 100644 --- a/website/docs/r/data_factory_dataset_parquet.html.markdown +++ b/website/docs/r/data_factory_dataset_parquet.html.markdown @@ -110,6 +110,8 @@ A `azure_blob_storage_location` block supports the following: * `filename` - (Required) The filename of the file on the web server. +* `dynamic_container_enabled` - (Optional) Is the `container` using dynamic expression, function or system variables? Defaults to `false`. + * `dynamic_path_enabled` - (Optional) Is the `path` using dynamic expression, function or system variables? Defaults to `false`. * `dynamic_filename_enabled` - (Optional) Is the `filename` using dynamic expression, function or system variables? Defaults to `false`.