diff --git a/internal/services/storagecache/hpc_cache_blob_nfs_target_resource.go b/internal/services/storagecache/hpc_cache_blob_nfs_target_resource.go
index d07295d5b48f..008e3a670d73 100644
--- a/internal/services/storagecache/hpc_cache_blob_nfs_target_resource.go
+++ b/internal/services/storagecache/hpc_cache_blob_nfs_target_resource.go
@@ -19,6 +19,7 @@ import (
 	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation"
 	"github.com/hashicorp/terraform-provider-azurerm/internal/timeouts"
+	"github.com/hashicorp/terraform-provider-azurerm/utils"
 )
 
 func resourceHPCCacheBlobNFSTarget() *pluginsdk.Resource {
@@ -94,6 +95,18 @@ func resourceHPCCacheBlobNFSTarget() *pluginsdk.Resource {
 				Default:      "default",
 				ValidateFunc: validation.StringIsNotEmpty,
 			},
+
+			"verification_timer_in_seconds": {
+				Type:         pluginsdk.TypeInt,
+				Optional:     true,
+				ValidateFunc: validation.IntBetween(1, 31536000),
+			},
+
+			"write_back_timer_in_seconds": {
+				Type:         pluginsdk.TypeInt,
+				Optional:     true,
+				ValidateFunc: validation.IntBetween(1, 31536000),
+			},
 		},
 	}
 }
@@ -145,6 +158,14 @@ func resourceHPCCacheBlobNFSTargetCreateUpdate(d *pluginsdk.ResourceData, meta i
 		},
 	}
 
+	if v, ok := d.GetOk("verification_timer_in_seconds"); ok {
+		param.Properties.BlobNfs.VerificationTimer = utils.Int64(int64(v.(int)))
+	}
+
+	if v, ok := d.GetOk("write_back_timer_in_seconds"); ok {
+		param.Properties.BlobNfs.WriteBackTimer = utils.Int64(int64(v.(int)))
+	}
+
 	if err := client.CreateOrUpdateThenPoll(ctx, id, param); err != nil {
 		return fmt.Errorf("creating %s: %+v", id, err)
 	}
@@ -190,6 +211,8 @@ func resourceHPCCacheBlobNFSTargetRead(d *pluginsdk.ResourceData, meta interface
 			if b := props.BlobNfs; b != nil {
 				storageContainerId = pointer.From(b.Target)
 				usageModel = pointer.From(b.UsageModel)
+				d.Set("verification_timer_in_seconds", pointer.From(b.VerificationTimer))
+				d.Set("write_back_timer_in_seconds", pointer.From(b.WriteBackTimer))
 			}
 			d.Set("storage_container_id", storageContainerId)
 			d.Set("usage_model", usageModel)
diff --git a/internal/services/storagecache/hpc_cache_blob_nfs_target_resource_test.go b/internal/services/storagecache/hpc_cache_blob_nfs_target_resource_test.go
index 9adb2e8edb44..5cbf6192f9cd 100644
--- a/internal/services/storagecache/hpc_cache_blob_nfs_target_resource_test.go
+++ b/internal/services/storagecache/hpc_cache_blob_nfs_target_resource_test.go
@@ -112,14 +112,14 @@ func TestAccHPCCacheBlobNFSTarget_usageModel(t *testing.T) {
 
 	data.ResourceTest(t, r, []acceptance.TestStep{
 		{
-			Config: r.usageModel(data, "READ_WRITE"),
+			Config: r.usageModelReadWrite(data),
 			Check: acceptance.ComposeTestCheckFunc(
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
 		},
 		data.ImportStep(),
 		{
-			Config: r.usageModel(data, "READ_ONLY"),
+			Config: r.usageModelReadOnly(data),
 			Check: acceptance.ComposeTestCheckFunc(
 				check.That(data.ResourceName).ExistsInAzure(r),
 			),
@@ -376,17 +376,35 @@ resource "azurerm_hpc_cache" "test" {
 `, data.RandomInteger, data.Locations.Primary, data.RandomString)
 }
 
-func (r HPCCacheBlobNFSTargetResource) usageModel(data acceptance.TestData, modelName string) string {
+func (r HPCCacheBlobNFSTargetResource) usageModelReadWrite(data acceptance.TestData) string {
 	return fmt.Sprintf(`
 %s
 
 resource "azurerm_hpc_cache_blob_nfs_target" "test" {
-  name                 = "acctest-HPCCTGT-%s"
-  resource_group_name  = azurerm_resource_group.test.name
-  cache_name           = azurerm_hpc_cache.test.name
-  storage_container_id = jsondecode(azurerm_resource_group_template_deployment.storage-containers.output_content).id.value
-  namespace_path       = "/p1"
-  usage_model          = "%s"
+  name                          = "acctest-HPCCTGT-%s"
+  resource_group_name           = azurerm_resource_group.test.name
+  cache_name                    = azurerm_hpc_cache.test.name
+  storage_container_id          = jsondecode(azurerm_resource_group_template_deployment.storage-containers.output_content).id.value
+  namespace_path                = "/p1"
+  usage_model                   = "READ_WRITE"
+  verification_timer_in_seconds = 29000
+  write_back_timer_in_seconds   = 3700
+}
+`, r.template(data), data.RandomString)
 }
-`, r.template(data), data.RandomString, modelName)
+
+func (r HPCCacheBlobNFSTargetResource) usageModelReadOnly(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_hpc_cache_blob_nfs_target" "test" {
+  name                          = "acctest-HPCCTGT-%s"
+  resource_group_name           = azurerm_resource_group.test.name
+  cache_name                    = azurerm_hpc_cache.test.name
+  storage_container_id          = jsondecode(azurerm_resource_group_template_deployment.storage-containers.output_content).id.value
+  namespace_path                = "/p1"
+  usage_model                   = "READ_ONLY"
+  verification_timer_in_seconds = 30000
+}
+`, r.template(data), data.RandomString)
 }
diff --git a/website/docs/r/hpc_cache_blob_nfs_target.html.markdown b/website/docs/r/hpc_cache_blob_nfs_target.html.markdown
index 2a9c8620fdde..2b6aec0b9851 100644
--- a/website/docs/r/hpc_cache_blob_nfs_target.html.markdown
+++ b/website/docs/r/hpc_cache_blob_nfs_target.html.markdown
@@ -185,6 +185,10 @@ The following arguments are supported:
 
 * `usage_model` - (Required) The type of usage of the HPC Cache Blob NFS Target. Possible values are: `READ_HEAVY_INFREQ`, `READ_HEAVY_CHECK_180`, `READ_ONLY`, `READ_WRITE`, `WRITE_WORKLOAD_15`, `WRITE_AROUND`, `WRITE_WORKLOAD_CHECK_30`, `WRITE_WORKLOAD_CHECK_60` and `WRITE_WORKLOAD_CLOUDWS`.
 
+* `verification_timer_in_seconds` - (Optional) The amount of time the cache waits before it checks the back-end storage for file updates. Possible values are between `1` and `31536000`.
+
+* `write_back_timer_in_seconds` - (Optional) The amount of time the cache waits after the last file change before it copies the changed file to back-end storage. Possible values are between `1` and `31536000`.
+
 ---
 
 * `access_policy_name` - (Optional) The name of the access policy applied to this target. Defaults to `default`.
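A minimal sketch of how the two new arguments could be set in a configuration, assuming this change ships as shown above. The resource names and the storage container reference below are illustrative placeholders, not values defined in this PR (the acceptance tests above obtain the container ID from an ARM template deployment instead).

```hcl
# Sketch only: "example" resources and the container ID reference are assumed
# placeholders; the two timer arguments are the ones added by this change.
resource "azurerm_hpc_cache_blob_nfs_target" "example" {
  name                 = "example-target"
  resource_group_name  = azurerm_resource_group.example.name
  cache_name           = azurerm_hpc_cache.example.name
  storage_container_id = azurerm_storage_container.example.resource_manager_id
  namespace_path       = "/blob_storage"
  usage_model          = "READ_WRITE"

  # New arguments: both accept values between 1 and 31536000 (seconds).
  verification_timer_in_seconds = 29000 # wait before re-checking back-end storage for updates
  write_back_timer_in_seconds   = 3700  # idle time after the last change before writing back
}
```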