azurerm_mssql_database - support 100MB and 500MB for max_size_gb #28334

Open · wants to merge 3 commits into base: main
49 changes: 41 additions & 8 deletions internal/services/mssql/mssql_database_resource.go
@@ -7,6 +7,7 @@ import (
"context"
"fmt"
"log"
"math"
"strings"
"time"

@@ -389,8 +390,13 @@ func resourceMsSqlDatabaseCreate(d *pluginsdk.ResourceData, meta interface{}) er
if v, ok := d.GetOk("max_size_gb"); ok {
// `max_size_gb` is Computed, so has a value after the first run
if createMode != string(databases.CreateModeOnlineSecondary) && createMode != string(databases.CreateModeSecondary) {
input.Properties.MaxSizeBytes = pointer.To(int64(v.(int)) * 1073741824)
v, err := calculateMaxSizeBytes(v.(float64))
if err != nil {
return err
}
input.Properties.MaxSizeBytes = v
}

// `max_size_gb` only has a change if it is configured
if d.HasChange("max_size_gb") && (createMode == string(databases.CreateModeOnlineSecondary) || createMode == string(databases.CreateModeSecondary)) {
return fmt.Errorf("it is not possible to change maximum size nor advised to configure maximum size in secondary create mode for %s", id)
@@ -784,9 +790,12 @@ func resourceMsSqlDatabaseUpdate(d *pluginsdk.ResourceData, meta interface{}) er

if v, ok := d.GetOk("max_size_gb"); ok {
// `max_size_gb` is Computed, so has a value after the first run
if createMode != string(databases.CreateModeOnlineSecondary) && createMode != string(databases.CreateModeSecondary) {
props.MaxSizeBytes = pointer.To(int64(v.(int)) * 1073741824)
v, err := calculateMaxSizeBytes(v.(float64))
if err != nil {
return err
}
props.MaxSizeBytes = v

// `max_size_gb` only has a change if it is configured
if d.HasChange("max_size_gb") && (createMode == string(databases.CreateModeOnlineSecondary) || createMode == string(databases.CreateModeSecondary)) {
return fmt.Errorf("it is not possible to change maximum size nor advised to configure maximum size in secondary create mode for %s", id)
@@ -1181,9 +1190,8 @@ func resourceMsSqlDatabaseRead(d *pluginsdk.ResourceData, meta interface{}) erro
d.Set("license_type", d.Get("license_type").(string))
}

if props.MaxSizeBytes != nil {
d.Set("max_size_gb", int32((*props.MaxSizeBytes)/int64(1073741824)))
}
// Set this from config, as the API returns the maximum value rather than the value as specified.
d.Set("max_size_gb", d.Get("max_size_gb").(float64))

if props.CurrentServiceObjectiveName != nil {
skuName = *props.CurrentServiceObjectiveName
@@ -1599,10 +1607,10 @@ func resourceMsSqlDatabaseSchema() map[string]*pluginsdk.Schema {
"short_term_retention_policy": helper.ShortTermRetentionPolicySchema(),

"max_size_gb": {
Type: pluginsdk.TypeInt,
Type: pluginsdk.TypeFloat,
Optional: true,
Computed: true,
ValidateFunc: validation.IntBetween(1, 4096),
ValidateFunc: validation.FloatBetween(0.1, 4096),
},

"min_capacity": {
@@ -1884,3 +1892,28 @@

return resource
}

func calculateMaxSizeBytes(v float64) (*int64, error) {
var maxSizeBytes int64
integer := isPositiveInteger(v)
if integer {
maxSizeBytes = int64(math.Round(v)) * 1073741824
} else {
// When `max_size_gb` is below 1GB, the API only accepts 100MB (104857600 bytes) and 500MB (524288000 bytes).
// Since 100MB != 0.1 * 1073741824 and 500MB != 0.5 * 1073741824, use the exact byte counts when `max_size_gb` is set to `0.1` or `0.5`.
switch int64(math.Round(v * 10)) {
case 1:
maxSizeBytes = 104857600
case 5:
maxSizeBytes = 524288000
default:
return nil, fmt.Errorf("`max_size_gb` must be `0.1`, `0.5`, or an integer greater than or equal to `1`")
}
}
return pointer.To(maxSizeBytes), nil
}

func isPositiveInteger(num float64) bool {
// Reports whether `num` is an integer value greater than or equal to 1
return num >= 1 && num == math.Floor(num)
}
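
As a sanity check on the new helper, a table-driven unit test along these lines could sit next to the acceptance tests below (a sketch only — the PR itself adds no unit test, and the `mssql` package name is assumed from the file path):

```go
package mssql

import "testing"

func TestCalculateMaxSizeBytes(t *testing.T) {
	cases := []struct {
		input   float64
		want    int64
		wantErr bool
	}{
		{0.1, 104857600, false}, // 100MB maps to a fixed byte count
		{0.5, 524288000, false}, // 500MB maps to a fixed byte count
		{1, 1073741824, false},  // whole gigabytes are multiplied by 2^30
		{2, 2147483648, false},
		{0.2, 0, true}, // any other sub-gigabyte value is rejected
	}
	for _, tc := range cases {
		got, err := calculateMaxSizeBytes(tc.input)
		if tc.wantErr {
			if err == nil {
				t.Fatalf("expected an error for input %v", tc.input)
			}
			continue
		}
		if err != nil {
			t.Fatalf("unexpected error for input %v: %v", tc.input, err)
		}
		if *got != tc.want {
			t.Fatalf("input %v: got %d bytes, want %d", tc.input, *got, tc.want)
		}
	}
}
```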
74 changes: 58 additions & 16 deletions internal/services/mssql/mssql_database_resource_test.go
@@ -38,6 +38,35 @@ func TestAccMsSqlDatabase_basic(t *testing.T) {
})
}

func TestAccMsSqlDatabase_maxSizeGB(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test")
r := MsSqlDatabaseResource{}

data.ResourceTest(t, r, []acceptance.TestStep{
{
Config: r.maxSizeGB(data, 0.1),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("max_size_gb"),
{
Config: r.maxSizeGB(data, 0.5),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("max_size_gb"),
{
Config: r.maxSizeGB(data, 1),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("max_size_gb"),
})
}

func TestAccMsSqlDatabase_free(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_mssql_database", "test")
r := MsSqlDatabaseResource{}
@@ -96,7 +125,7 @@ func TestAccMsSqlDatabase_complete(t *testing.T) {
check.That(data.ResourceName).Key("tags.ENV").HasValue("Test"),
),
},
data.ImportStep("sample_name"),
data.ImportStep("sample_name", "max_size_gb"),
{
Config: r.update(data),
Check: acceptance.ComposeTestCheckFunc(
Expand All @@ -108,7 +137,7 @@ func TestAccMsSqlDatabase_complete(t *testing.T) {
check.That(data.ResourceName).Key("tags.ENV").HasValue("Staging"),
),
},
data.ImportStep("sample_name"),
data.ImportStep("sample_name", "max_size_gb"),
})
}

@@ -448,56 +477,56 @@ func TestAccMsSqlDatabase_scaleReplicaSet(t *testing.T) {
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("sample_name", "license_type"),
data.ImportStep("sample_name", "license_type", "max_size_gb"),
{
Config: r.scaleReplicaSet(data, "P2"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("sample_name", "license_type"),
data.ImportStep("sample_name", "license_type", "max_size_gb"),
{
Config: r.scaleReplicaSet(data, "GP_Gen5_2"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("sample_name", "license_type"),
data.ImportStep("sample_name", "license_type", "max_size_gb"),
{
Config: r.scaleReplicaSet(data, "BC_Gen5_2"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("sample_name", "license_type"),
data.ImportStep("sample_name", "license_type", "max_size_gb"),
{
Config: r.scaleReplicaSet(data, "GP_Gen5_2"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("sample_name", "license_type"),
data.ImportStep("sample_name", "license_type", "max_size_gb"),
{
Config: r.scaleReplicaSet(data, "S2"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("sample_name", "license_type"),
data.ImportStep("sample_name", "license_type", "max_size_gb"),
{
Config: r.scaleReplicaSet(data, "Basic"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("sample_name", "license_type"),
data.ImportStep("sample_name", "license_type", "max_size_gb"),
{
Config: r.scaleReplicaSet(data, "S1"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep("sample_name", "license_type"),
data.ImportStep("sample_name", "license_type", "max_size_gb"),
})
}

@@ -515,7 +544,7 @@ func TestAccMsSqlDatabase_scaleReplicaSetWithFailovergroup(t *testing.T) {
check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"),
),
},
data.ImportStep(),
data.ImportStep("max_size_gb"),
{
Config: r.scaleReplicaSetWithFailovergroup(data, "GP_Gen5_8", 25),
Check: acceptance.ComposeTestCheckFunc(
Expand All @@ -525,7 +554,7 @@ func TestAccMsSqlDatabase_scaleReplicaSetWithFailovergroup(t *testing.T) {
check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_8"),
),
},
data.ImportStep(),
data.ImportStep("max_size_gb"),
{
Config: r.scaleReplicaSetWithFailovergroup(data, "GP_Gen5_2", 5),
Check: acceptance.ComposeTestCheckFunc(
Expand All @@ -535,7 +564,7 @@ func TestAccMsSqlDatabase_scaleReplicaSetWithFailovergroup(t *testing.T) {
check.That(data.ResourceName).Key("sku_name").HasValue("GP_Gen5_2"),
),
},
data.ImportStep(),
data.ImportStep("max_size_gb"),
})
}

@@ -607,7 +636,7 @@ func TestAccMsSqlDatabase_threatDetectionPolicy(t *testing.T) {
check.That(data.ResourceName).Key("threat_detection_policy.0.email_account_admins").HasValue("Enabled"),
),
},
data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"),
data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key", "max_size_gb"),
{
Config: r.threatDetectionPolicy(data, "Disabled"),
Check: acceptance.ComposeTestCheckFunc(
Expand All @@ -616,7 +645,7 @@ func TestAccMsSqlDatabase_threatDetectionPolicy(t *testing.T) {
check.That(data.ResourceName).Key("threat_detection_policy.0.state").HasValue("Disabled"),
),
},
data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"),
data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key", "max_size_gb"),
})
}

@@ -634,7 +663,7 @@ func TestAccMsSqlDatabase_threatDetectionPolicyNoStorage(t *testing.T) {
check.That(data.ResourceName).Key("threat_detection_policy.0.storage_endpoint").IsEmpty(),
),
},
data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key"),
data.ImportStep("sample_name", "threat_detection_policy.0.storage_account_access_key", "max_size_gb"),
{
Config: r.threatDetectionPolicy(data, "Enabled"),
Check: acceptance.ComposeTestCheckFunc(
@@ -2374,3 +2403,16 @@ resource "azurerm_mssql_database" "test" {
}
`, r.template(data), data.RandomInteger)
}

func (r MsSqlDatabaseResource) maxSizeGB(data acceptance.TestData, maxSizeGb float64) string {
return fmt.Sprintf(`
%[1]s

resource "azurerm_mssql_database" "test" {
name = "acctest-db-%[2]d"
server_id = azurerm_mssql_server.test.id
sku_name = "Basic"
max_size_gb = %[3]f
}
`, r.template(data), data.RandomInteger, maxSizeGb)
}
2 changes: 1 addition & 1 deletion website/docs/r/mssql_database.html.markdown
@@ -204,7 +204,7 @@ The following arguments are supported:

* `max_size_gb` - (Optional) The max size of the database in gigabytes.

~> **NOTE:** This value should not be configured when the `create_mode` is `Secondary` or `OnlineSecondary`, as the sizing of the primary is then used as per [Azure documentation](https://docs.microsoft.com/azure/azure-sql/database/single-database-scale#geo-replicated-database).
~> **NOTE:** This value should not be configured when the `create_mode` is `Secondary` or `OnlineSecondary`, as the sizing of the primary is then used as per [Azure documentation](https://docs.microsoft.com/azure/azure-sql/database/single-database-scale#geo-replicated-database). `max_size_gb` accepts `0.1`, `0.5`, or a positive integer; `0.1` provisions a `100MB` database and `0.5` a `500MB` database.
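
For example, a sub-gigabyte database could be configured as follows (an illustrative sketch; the resource names are placeholders):

```hcl
resource "azurerm_mssql_database" "example" {
  name        = "example-db"
  server_id   = azurerm_mssql_server.example.id
  sku_name    = "Basic"
  max_size_gb = 0.5 # provisions a 500MB database
}
```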

* `min_capacity` - (Optional) Minimal capacity that database will always have allocated, if not paused. This property is only settable for Serverless databases.
