Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Test Fixes - Swapping regions based on quota #16514

Merged
merged 4 commits into from
Apr 25, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 13 additions & 1 deletion .teamcity/components/settings.kt
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,15 @@ var serviceTestConfigurationOverrides = mapOf(
// Log Analytics Clusters have a max deployments of 2 - parallelism set to 1 or `importTest` fails
"loganalytics" to testConfiguration(parallelism = 1),

// Logic uses app service which is only available in certain locations
"logic" to testConfiguration(locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false)),

// MSSQL uses app service which is only available in certain locations
"mssql" to testConfiguration(locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false)),

// MySQL has quota available in certain locations
"mysql" to testConfiguration(locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false)),

// netapp has a max of 20 accounts per subscription so lets limit it to 10 to account for broken ones, run Monday, Wednesday, Friday
"netapp" to testConfiguration(parallelism = 10, daysOfWeek = "2,4,6"),

Expand All @@ -82,9 +91,12 @@ var serviceTestConfigurationOverrides = mapOf(
// Spring Cloud only allows a max of 10 provisioned
"springcloud" to testConfiguration(parallelism = 5),

// Synapse is only available in certain locations
"synapse" to testConfiguration(locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false)),

// Currently, we have insufficient quota to actually run these, but there are a few nodes in West Europe, so we'll pin it there for now
"vmware" to testConfiguration(parallelism = 3, locationOverride = LocationConfiguration("westeurope", "westus2", "eastus2", false)),

// Offset start hour to avoid collision with new App Service, reduce frequency of testing days
"web" to testConfiguration(startHour = 3, daysOfWeek = "2,4,6")
"web" to testConfiguration(startHour = 3, daysOfWeek = "2,4,6", locationOverride = LocationConfiguration("westeurope", "francecentral", "eastus2", false))
)
2 changes: 1 addition & 1 deletion internal/services/datafactory/data_factory.go
Original file line number Diff line number Diff line change
Expand Up @@ -465,7 +465,7 @@ func flattenDataFactoryDatasetAzureBlobStorageLocation(input *datafactory.AzureB
result := make(map[string]interface{})

if input.Container != nil {
container, dynamicContainerEnabled := flattenDataFactoryExpressionResultType(input.FolderPath)
container, dynamicContainerEnabled := flattenDataFactoryExpressionResultType(input.Container)
result["container"] = container
result["dynamic_container_enabled"] = dynamicContainerEnabled
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,11 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource {
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"dynamic_container_enabled": {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do these new properties need to be added to the tests and docs?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ohh, thanks for that catch!

Type: pluginsdk.TypeBool,
Optional: true,
Default: false,
},
"path": {
Type: pluginsdk.TypeString,
Optional: true,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,35 @@ func TestAccDataFactoryDatasetBinary_sftpComplete(t *testing.T) {
})
}

// TestAccDataFactoryDatasetBinary_blobDynamicContainer checks that a binary
// dataset's blob container can be switched from a static name to a dynamic
// (expression-driven) one and back again, importing state after each apply.
func TestAccDataFactoryDatasetBinary_blobDynamicContainer(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_binary", "test")
	r := DatasetBinaryResource{}

	// The same existence check runs after every apply step.
	existsInAzure := acceptance.ComposeTestCheckFunc(
		check.That(data.ResourceName).ExistsInAzure(r),
	)

	data.ResourceTest(t, r, []acceptance.TestStep{
		{
			// Start from the plain static-container configuration.
			Config: r.blob(data),
			Check:  existsInAzure,
		},
		data.ImportStep(),
		{
			// Flip the container to a dynamic expression.
			Config: r.blobDynamicContainer(data),
			Check:  existsInAzure,
		},
		data.ImportStep(),
		{
			// Revert to the static configuration.
			Config: r.blob(data),
			Check:  existsInAzure,
		},
		data.ImportStep(),
	})
}

func (t DatasetBinaryResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
id, err := parse.DataSetID(state.ID)
if err != nil {
Expand Down Expand Up @@ -349,3 +378,54 @@ resource "azurerm_data_factory_dataset_binary" "test" {
}
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

// blobDynamicContainer returns an acceptance-test configuration for a binary
// dataset whose azure_blob_storage_location sets dynamic_container_enabled,
// marking the container name as a Data Factory expression rather than a
// static value.
func (DatasetBinaryResource) blobDynamicContainer(data acceptance.TestData) string {
	// The HCL below is a raw string template; Sprintf substitutes the random
	// test identifiers and the primary test location into the %d/%s verbs.
	return fmt.Sprintf(`
provider "azurerm" {
  features {}
}

resource "azurerm_resource_group" "test" {
  name     = "acctestRG-df-%d"
  location = "%s"
}

resource "azurerm_storage_account" "test" {
  name                     = "acctestdf%s"
  location                 = azurerm_resource_group.test.location
  resource_group_name      = azurerm_resource_group.test.name
  account_tier             = "Standard"
  account_replication_type = "GRS"
}

resource "azurerm_storage_container" "test" {
  name                  = "content"
  storage_account_name  = azurerm_storage_account.test.name
  container_access_type = "private"
}

resource "azurerm_data_factory" "test" {
  name                = "acctestdf%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}


resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" {
  name              = "acctestlsblob%d"
  data_factory_id   = azurerm_data_factory.test.id
  connection_string = azurerm_storage_account.test.primary_connection_string
}

resource "azurerm_data_factory_dataset_binary" "test" {
  name                = "acctestds%d"
  data_factory_id     = azurerm_data_factory.test.id
  linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name

  azure_blob_storage_location {
    container                 = azurerm_storage_container.test.name
    dynamic_container_enabled = true
  }
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,11 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource {
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"dynamic_container_enabled": {
Type: pluginsdk.TypeBool,
Optional: true,
Default: false,
},
"path": {
Type: pluginsdk.TypeString,
Optional: true,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,35 @@ func TestAccDataFactoryDatasetDelimitedText_blobFS(t *testing.T) {
})
}

// TestAccDataFactoryDatasetDelimitedText_blobDynamicContainer verifies the
// round trip static container -> dynamic container -> static container for a
// delimited-text dataset, importing state after each apply.
func TestAccDataFactoryDatasetDelimitedText_blobDynamicContainer(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")
	r := DatasetDelimitedTextResource{}

	// Shared existence check applied after every configuration step.
	existsInAzure := acceptance.ComposeTestCheckFunc(
		check.That(data.ResourceName).ExistsInAzure(r),
	)

	// Apply the static config, switch to the dynamic-container config, then
	// switch back — importing after every step to confirm a clean round trip.
	steps := []acceptance.TestStep{
		{
			Config: r.blob(data),
			Check:  existsInAzure,
		},
		data.ImportStep(),
		{
			Config: r.blobDynamicContainer(data),
			Check:  existsInAzure,
		},
		data.ImportStep(),
		{
			Config: r.blob(data),
			Check:  existsInAzure,
		},
		data.ImportStep(),
	}

	data.ResourceTest(t, r, steps)
}

func (t DatasetDelimitedTextResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
id, err := parse.DataSetID(state.ID)
if err != nil {
Expand Down Expand Up @@ -455,7 +484,6 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" {
escape_character = "f"
first_row_as_header = true
null_value = "NULL"

}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
Expand Down Expand Up @@ -592,3 +620,66 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" {
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

// blobDynamicContainer returns an acceptance-test configuration for a
// delimited-text dataset whose azure_blob_storage_location marks the
// container, path and filename as dynamic Data Factory expressions
// (dynamic_container_enabled / dynamic_path_enabled /
// dynamic_filename_enabled all set to true).
func (DatasetDelimitedTextResource) blobDynamicContainer(data acceptance.TestData) string {
	// The HCL below is a raw string template; Sprintf substitutes the random
	// test identifiers and the primary test location into the %d/%s verbs.
	return fmt.Sprintf(`
provider "azurerm" {
  features {}
}

resource "azurerm_resource_group" "test" {
  name     = "acctestRG-df-%d"
  location = "%s"
}

resource "azurerm_storage_account" "test" {
  name                     = "acctestdf%s"
  location                 = azurerm_resource_group.test.location
  resource_group_name      = azurerm_resource_group.test.name
  account_tier             = "Standard"
  account_replication_type = "GRS"
}

resource "azurerm_storage_container" "test" {
  name                  = "content"
  storage_account_name  = azurerm_storage_account.test.name
  container_access_type = "private"
}

resource "azurerm_data_factory" "test" {
  name                = "acctestdf%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}


resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" {
  name              = "acctestlsblob%d"
  data_factory_id   = azurerm_data_factory.test.id
  connection_string = azurerm_storage_account.test.primary_connection_string
}

resource "azurerm_data_factory_dataset_delimited_text" "test" {
  name                = "acctestds%d"
  data_factory_id     = azurerm_data_factory.test.id
  linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name

  azure_blob_storage_location {
    container                 = azurerm_storage_container.test.name
    dynamic_container_enabled = true
    path                      = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))"
    dynamic_path_enabled      = true
    filename                  = "@concat('foo', '.txt')"
    dynamic_filename_enabled  = true
  }

  column_delimiter    = ","
  row_delimiter       = "NEW"
  encoding            = "UTF-8"
  quote_character     = "x"
  escape_character    = "f"
  first_row_as_header = true
  null_value          = "NULL"
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,11 @@ func resourceDataFactoryDatasetJSON() *pluginsdk.Resource {
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"dynamic_container_enabled": {
Type: pluginsdk.TypeBool,
Optional: true,
Default: false,
},
"path": {
Type: pluginsdk.TypeString,
Required: true,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,35 @@ func TestAccDataFactoryDatasetJSON_blob(t *testing.T) {
})
}

// TestAccDataFactoryDatasetJSON_blobDynamicContainer exercises switching a
// JSON dataset's blob container between static and dynamic (expression-based)
// configurations, with an import verification after each apply.
func TestAccDataFactoryDatasetJSON_blobDynamicContainer(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_json", "test")
	r := DatasetJSONResource{}

	// Each apply is followed by the same existence check and an import step.
	existsInAzure := acceptance.ComposeTestCheckFunc(
		check.That(data.ResourceName).ExistsInAzure(r),
	)

	data.ResourceTest(t, r, []acceptance.TestStep{
		{
			// Static container to begin with.
			Config: r.blob(data),
			Check:  existsInAzure,
		},
		data.ImportStep(),
		{
			// Switch the container to a dynamic expression.
			Config: r.blobDynamicContainer(data),
			Check:  existsInAzure,
		},
		data.ImportStep(),
		{
			// And back to the static configuration.
			Config: r.blob(data),
			Check:  existsInAzure,
		},
		data.ImportStep(),
	})
}

func (t DatasetJSONResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
id, err := parse.DataSetID(state.ID)
if err != nil {
Expand Down Expand Up @@ -315,3 +344,58 @@ resource "azurerm_data_factory_dataset_json" "test" {
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

// blobDynamicContainer returns an acceptance-test configuration for a JSON
// dataset whose azure_blob_storage_location marks the container and path as
// dynamic Data Factory expressions while keeping a static filename
// (dynamic_filename_enabled = false), exercising the mixed case.
func (DatasetJSONResource) blobDynamicContainer(data acceptance.TestData) string {
	// The HCL below is a raw string template; Sprintf substitutes the random
	// test identifiers and the primary test location into the %d/%s verbs.
	return fmt.Sprintf(`
provider "azurerm" {
  features {}
}

resource "azurerm_resource_group" "test" {
  name     = "acctestRG-df-%d"
  location = "%s"
}

resource "azurerm_storage_account" "test" {
  name                     = "acctestdf%s"
  location                 = azurerm_resource_group.test.location
  resource_group_name      = azurerm_resource_group.test.name
  account_tier             = "Standard"
  account_replication_type = "GRS"
}

resource "azurerm_storage_container" "test" {
  name                  = "content"
  storage_account_name  = azurerm_storage_account.test.name
  container_access_type = "private"
}

resource "azurerm_data_factory" "test" {
  name                = "acctestdf%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}


resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" {
  name              = "acctestlsblob%d"
  data_factory_id   = azurerm_data_factory.test.id
  connection_string = azurerm_storage_account.test.primary_connection_string
}

resource "azurerm_data_factory_dataset_json" "test" {
  name                = "acctestds%d"
  data_factory_id     = azurerm_data_factory.test.id
  linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name

  azure_blob_storage_location {
    container                 = azurerm_storage_container.test.name
    dynamic_container_enabled = true
    path                      = "@concat('foo/bar/',formatDateTime(convertTimeZone(utcnow(),'UTC','W. Europe Standard Time'),'yyyy-MM-dd'))"
    dynamic_path_enabled      = true
    filename                  = "foo.json"
    dynamic_filename_enabled  = false
  }
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,11 @@ func resourceDataFactoryDatasetParquet() *pluginsdk.Resource {
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"dynamic_container_enabled": {
Type: pluginsdk.TypeBool,
Optional: true,
Default: false,
},
"path": {
Type: pluginsdk.TypeString,
Required: true,
Expand Down
Loading