diff --git a/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go new file mode 100644 index 000000000000..fcf9586abdec --- /dev/null +++ b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go @@ -0,0 +1,350 @@ +package machinelearning + +import ( + "context" + "fmt" + "time" + + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/workspaces" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" + "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate" + storageparse "github.com/hashicorp/terraform-provider-azurerm/internal/services/storage/parse" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" + "github.com/hashicorp/terraform-provider-azurerm/utils" +) + +type MachineLearningDataStoreFileShare struct{} + +type MachineLearningDataStoreFileShareModel struct { + Name string `tfschema:"name"` + WorkSpaceID string `tfschema:"workspace_id"` + StorageFileShareID string `tfschema:"storage_fileshare_id"` + Description string `tfschema:"description"` + IsDefault bool `tfschema:"is_default"` + ServiceDataIdentity string `tfschema:"service_data_identity"` + AccountKey string `tfschema:"account_key"` + SharedAccessSignature string `tfschema:"shared_access_signature"` + Tags map[string]string `tfschema:"tags"` +} + +func (r MachineLearningDataStoreFileShare) ModelObject() interface{} { + return &MachineLearningDataStoreFileShareModel{} +} + +func (r MachineLearningDataStoreFileShare) ResourceType() string { + return "azurerm_machine_learning_datastore_fileshare" +} + +func (r MachineLearningDataStoreFileShare) IDValidationFunc() pluginsdk.SchemaValidateFunc { + return datastore.ValidateDataStoreID +} + +var _ sdk.ResourceWithUpdate = MachineLearningDataStoreFileShare{} + +func (r MachineLearningDataStoreFileShare) Arguments() map[string]*pluginsdk.Schema { + return map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataStoreName, + }, + + "workspace_id": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.WorkspaceID, + }, + + "storage_fileshare_id": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "description": { + Type: pluginsdk.TypeString, + Optional: true, + ForceNew: true, + }, + + "service_data_identity": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datastore.ServiceDataAccessAuthIdentityNone), + string(datastore.ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity), + string(datastore.ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity), + }, + false), + Default: string(datastore.ServiceDataAccessAuthIdentityNone), + }, + + "account_key": { + Type: pluginsdk.TypeString, + Optional: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + 
ExactlyOneOf: []string{"account_key", "shared_access_signature"}, + }, + + "shared_access_signature": { + Type: pluginsdk.TypeString, + Optional: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + AtLeastOneOf: []string{"account_key", "shared_access_signature"}, + }, + + "tags": commonschema.TagsForceNew(), + } +} + +func (r MachineLearningDataStoreFileShare) Attributes() map[string]*schema.Schema { + return map[string]*pluginsdk.Schema{ + "is_default": { + Type: pluginsdk.TypeBool, + Computed: true, + }, + } +} + +func (r MachineLearningDataStoreFileShare) Create() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 30 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.MachineLearning.DatastoreClient + subscriptionId := metadata.Client.Account.SubscriptionId + + var model MachineLearningDataStoreFileShareModel + if err := metadata.Decode(&model); err != nil { + return fmt.Errorf("decoding %+v", err) + } + + workspaceId, err := workspaces.ParseWorkspaceID(model.WorkSpaceID) + if err != nil { + return err + } + + id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, model.Name) + + existing, err := client.Get(ctx, id) + if err != nil { + if !response.WasNotFound(existing.HttpResponse) { + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) + } + } + if !response.WasNotFound(existing.HttpResponse) { + return tf.ImportAsExistsError("azurerm_machine_learning_datastore_fileshare", id.ID()) + } + + fileShareId, err := storageparse.StorageShareResourceManagerID(model.StorageFileShareID) + if err != nil { + return err + } + + datastoreRaw := datastore.DatastoreResource{ + Name: utils.String(model.Name), + Type: utils.ToPtr(string(datastore.DatastoreTypeAzureFile)), + } + + props := &datastore.AzureFileDatastore{ + AccountName: fileShareId.StorageAccountName, + FileShareName: fileShareId.FileshareName, + Description: utils.String(model.Description), + ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(model.ServiceDataIdentity)), + Tags: utils.ToPtr(model.Tags), + } + + accountKey := model.AccountKey + if accountKey != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeAccountKey), + "secrets": map[string]interface{}{ + "secretsType": "AccountKey", + "key": accountKey, + }, + } + } + + sasToken := model.SharedAccessSignature + if sasToken != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeSas), + "secrets": map[string]interface{}{ + "secretsType": "Sas", + "sasToken": sasToken, + }, + } + } + datastoreRaw.Properties = props + + _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) + if err != nil { + return fmt.Errorf("creating/updating %s: %+v", id, err) + } + + metadata.SetID(id) + return nil + }, + } +} + +func (r MachineLearningDataStoreFileShare) Update() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 30 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.MachineLearning.DatastoreClient + + id, err := datastore.ParseDataStoreID(metadata.ResourceData.Id()) + if err != nil { + return err + } + + var state MachineLearningDataStoreFileShareModel + if err := metadata.Decode(&state); err != nil { + return err + } + + fileShareId, err := 
storageparse.StorageShareResourceManagerID(state.StorageFileShareID) + if err != nil { + return err + } + + datastoreRaw := datastore.DatastoreResource{ + Name: utils.String(id.DataStoreName), + Type: utils.ToPtr(string(datastore.DatastoreTypeAzureFile)), + } + + props := &datastore.AzureFileDatastore{ + AccountName: fileShareId.StorageAccountName, + FileShareName: fileShareId.FileshareName, + Description: utils.String(state.Description), + ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(state.ServiceDataIdentity)), + Tags: utils.ToPtr(state.Tags), + } + + accountKey := state.AccountKey + if accountKey != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeAccountKey), + "secrets": map[string]interface{}{ + "secretsType": "AccountKey", + "key": accountKey, + }, + } + } + + sasToken := state.SharedAccessSignature + if sasToken != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeSas), + "secrets": map[string]interface{}{ + "secretsType": "Sas", + "sasToken": sasToken, + }, + } + } + datastoreRaw.Properties = props + + _, err = client.CreateOrUpdate(ctx, *id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) + if err != nil { + return fmt.Errorf("updating %s: %+v", id, err) + } + + return nil + }, + } +} + +func (r MachineLearningDataStoreFileShare) Read() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 5 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.MachineLearning.DatastoreClient + subscriptionId := metadata.Client.Account.SubscriptionId + + id, err := datastore.ParseDataStoreID(metadata.ResourceData.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, *id) + if err != nil { + if response.WasNotFound(resp.HttpResponse) { + return metadata.MarkAsGone(id) + } + return fmt.Errorf("reading %s: %+v", *id, err) + } + + workspaceId := workspaces.NewWorkspaceID(subscriptionId, id.ResourceGroupName, id.WorkspaceName) + model := MachineLearningDataStoreFileShareModel{ + Name: *resp.Model.Name, + WorkSpaceID: workspaceId.ID(), + } + + data := resp.Model.Properties.(datastore.AzureFileDatastore) + serviceDataIdentity := "" + if v := data.ServiceDataAccessAuthIdentity; v != nil { + serviceDataIdentity = string(*v) + } + model.ServiceDataIdentity = serviceDataIdentity + + fileShareId := storageparse.NewStorageShareResourceManagerID(subscriptionId, workspaceId.ResourceGroupName, data.AccountName, "default", data.FileShareName) + model.StorageFileShareID = fileShareId.ID() + model.IsDefault = *data.IsDefault + + if v, ok := metadata.ResourceData.GetOk("account_key"); ok { + if v.(string) != "" { + model.AccountKey = v.(string) + } + } + + if v, ok := metadata.ResourceData.GetOk("shared_access_signature"); ok { + if v.(string) != "" { + model.SharedAccessSignature = v.(string) + } + } + + desc := "" + if v := data.Description; v != nil { + desc = *v + } + model.Description = desc + + if data.Tags != nil { + model.Tags = *data.Tags + } + + return metadata.Encode(&model) + }, + } +} + +func (r MachineLearningDataStoreFileShare) Delete() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 30 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.MachineLearning.DatastoreClient + + id, err := datastore.ParseDataStoreID(metadata.ResourceData.Id()) + if err != nil { + return err + } + + if _, err 
:= client.Delete(ctx, *id); err != nil { + return fmt.Errorf("deleting %s: %+v", *id, err) + } + + return nil + }, + } +} diff --git a/internal/services/machinelearning/machine_learning_datastore_fileshare_resource_test.go b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource_test.go new file mode 100644 index 000000000000..40b16af69530 --- /dev/null +++ b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource_test.go @@ -0,0 +1,272 @@ +package machinelearning_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" + "github.com/hashicorp/terraform-provider-azurerm/internal/clients" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/utils" +) + +type MachineLearningDataStoreFileShare struct{} + +func TestAccMachineLearningDataStoreFileShare_accountKey(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_fileshare", "test") + r := MachineLearningDataStoreFileShare{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.fileShareAccountKey(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("account_key"), + }) +} + +func TestAccMachineLearningDataStoreFileShare_sasToken(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_fileshare", "test") + r := MachineLearningDataStoreFileShare{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.fileShareSas(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("shared_access_signature"), + }) +} + +func TestAccMachineLearningDataStoreFileShare_Update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_fileshare", "test") + r := MachineLearningDataStoreFileShare{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.fileShareAccountKey(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("account_key"), + { + Config: r.fileShareSas(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("account_key", "shared_access_signature"), + }) +} + +func TestAccMachineLearningDataStoreFileShare_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_fileshare", "test") + r := MachineLearningDataStoreFileShare{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.fileShareAccountKey(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (r MachineLearningDataStoreFileShare) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { + dataStoreClient := client.MachineLearning.DatastoreClient + id, err := datastore.ParseDataStoreID(state.ID) + if err != nil { + return nil, err + } + + resp, err := dataStoreClient.Get(ctx, *id) + if err != nil { + if 
response.WasNotFound(resp.HttpResponse) { + return utils.Bool(false), nil + } + return nil, fmt.Errorf("retrieving Machine Learning Data Store File Share %q: %+v", state.ID, err) + } + + return utils.Bool(resp.Model.Properties != nil), nil +} + +func (r MachineLearningDataStoreFileShare) fileShareAccountKey(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_share" "test" { + name = "accfs%[2]d" + storage_account_name = azurerm_storage_account.test.name + quota = 1 +} + +resource "azurerm_machine_learning_datastore_fileshare" "test" { + name = "accdatastore%[2]d" + workspace_id = azurerm_machine_learning_workspace.test.id + storage_fileshare_id = azurerm_storage_share.test.resource_manager_id + account_key = azurerm_storage_account.test.primary_access_key +} +`, template, data.RandomInteger) +} + +func (r MachineLearningDataStoreFileShare) fileShareSas(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_share" "test" { + name = "accfs%[2]d" + storage_account_name = azurerm_storage_account.test.name + quota = 1 +} + +data "azurerm_storage_account_sas" "test" { + connection_string = azurerm_storage_account.test.primary_connection_string + https_only = true + signed_version = "2019-10-10" + + resource_types { + service = true + container = true + object = true + } + + services { + blob = true + queue = false + table = false + file = true + } + + start = "2022-01-01T06:17:07Z" + expiry = "2024-12-23T06:17:07Z" + + permissions { + read = true + write = true + delete = false + list = false + add = true + create = true + update = false + process = false + tag = false + filter = false + } +} + +resource "azurerm_machine_learning_datastore_fileshare" "test" { + name = "accdatastore%[2]d" + workspace_id = azurerm_machine_learning_workspace.test.id + storage_fileshare_id = azurerm_storage_share.test.resource_manager_id + shared_access_signature = data.azurerm_storage_account_sas.test.sas +} + + + + +`, template, data.RandomInteger) +} + +func (r MachineLearningDataStoreFileShare) requiresImport(data acceptance.TestData) string { + template := r.fileShareAccountKey(data) + return fmt.Sprintf(` +%s + +resource "azurerm_machine_learning_datastore_fileshare" "import" { + name = azurerm_machine_learning_datastore_fileshare.test.name + workspace_id = azurerm_machine_learning_datastore_fileshare.test.workspace_id + storage_fileshare_id = azurerm_machine_learning_datastore_fileshare.test.storage_fileshare_id + account_key = azurerm_machine_learning_datastore_fileshare.test.account_key +} + +`, template) +} + +func (r MachineLearningDataStoreFileShare) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = false + purge_soft_deleted_keys_on_destroy = false + } + resource_group { + prevent_deletion_if_contains_resources = false + } + + } +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-ml-%[1]d" + location = "%[2]s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestai-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_key_vault" "test" { + name = "acctestvault%[3]s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id 
= data.azurerm_client_config.current.tenant_id + + sku_name = "standard" + + purge_protection_enabled = true +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "Create", + "Get", + "Delete", + "Purge", + ] +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%[4]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_machine_learning_workspace" "test" { + name = "acctest-MLW-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_insights_id = azurerm_application_insights.test.id + key_vault_id = azurerm_key_vault.test.id + storage_account_id = azurerm_storage_account.test.id + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomIntOfLength(15)) +} diff --git a/internal/services/machinelearning/registration.go b/internal/services/machinelearning/registration.go index b393ce684419..6d3fbe4d1f57 100644 --- a/internal/services/machinelearning/registration.go +++ b/internal/services/machinelearning/registration.go @@ -52,5 +52,6 @@ func (r Registration) DataSources() []sdk.DataSource { func (r Registration) Resources() []sdk.Resource { return []sdk.Resource{ MachineLearningDataStoreBlobStorage{}, + MachineLearningDataStoreFileShare{}, } } diff --git a/website/docs/r/machine_learning_datastore_fileshare.html.markdown b/website/docs/r/machine_learning_datastore_fileshare.html.markdown new file mode 100644 index 000000000000..25e0fd8b112e --- /dev/null +++ b/website/docs/r/machine_learning_datastore_fileshare.html.markdown @@ -0,0 +1,121 @@ +--- +subcategory: "Machine Learning" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_machine_learning_datastore_fileshare" +description: |- + Manages a Machine Learning File Share DataStore. +--- + +# azurerm_machine_learning_datastore_fileshare + +Manages a Machine Learning File Share DataStore.
+ +## Example Usage with Azure File Share + +```hcl +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_application_insights" "example" { + name = "workspace-example-ai" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + application_type = "web" +} + +resource "azurerm_key_vault" "example" { + name = "workspaceexamplekeyvault" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" +} + +resource "azurerm_storage_account" "example" { + name = "workspacestorageaccount" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_machine_learning_workspace" "example" { + name = "example-workspace" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + application_insights_id = azurerm_application_insights.example.id + key_vault_id = azurerm_key_vault.example.id + storage_account_id = azurerm_storage_account.example.id + + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_storage_share" "example" { + name = "example" + storage_account_name = azurerm_storage_account.example.name + quota = 1 +} + +resource "azurerm_machine_learning_datastore_fileshare" "example" { + name = "example-datastore" + workspace_id = azurerm_machine_learning_workspace.example.id + storage_fileshare_id = azurerm_storage_share.example.resource_manager_id + account_key = azurerm_storage_account.example.primary_access_key +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name of the Machine Learning DataStore. Changing this forces a new Machine Learning DataStore to be created. + +* `workspace_id` - (Required) The ID of the Machine Learning Workspace. Changing this forces a new Machine Learning DataStore to be created. + +* `storage_fileshare_id` - (Required) The ID of the Storage Account File Share. Changing this forces a new Machine Learning DataStore to be created. + +--- + +* `account_key` - (Optional) The access key of the Storage Account. Conflicts with `shared_access_signature`. + +* `shared_access_signature` - (Optional) The Shared Access Signature of the Storage Account. Conflicts with `account_key`. A SAS-based configuration is sketched below the attributes list. + +* `description` - (Optional) Text used to describe the asset. Changing this forces a new Machine Learning DataStore to be created. + +* `service_data_identity` - (Optional) Specifies which identity to use when retrieving data from the specified source. Defaults to `None`. Possible values are `None`, `WorkspaceSystemAssignedIdentity` and `WorkspaceUserAssignedIdentity`. + +* `tags` - (Optional) A mapping of tags which should be assigned to the Machine Learning DataStore. Changing this forces a new Machine Learning DataStore to be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Machine Learning DataStore. + +* `is_default` - Indicates whether this Machine Learning DataStore is the default for the Workspace.
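Since `account_key` and `shared_access_signature` are mutually exclusive, the datastore can be configured with a SAS token instead of the account key. The following is a minimal, illustrative sketch (not part of the generated reference documentation): it reuses the resource names from the example above, the SAS validity window and permissions are placeholders, and the datastore block is a drop-in replacement for the one shown earlier.

```hcl
# Issue a SAS token against the example storage account, scoped to the file service.
data "azurerm_storage_account_sas" "example" {
  connection_string = azurerm_storage_account.example.primary_connection_string
  https_only        = true

  resource_types {
    service   = true
    container = true
    object    = true
  }

  services {
    blob  = false
    queue = false
    table = false
    file  = true
  }

  # Placeholder validity window - adjust to suit.
  start  = "2023-01-01T00:00:00Z"
  expiry = "2024-01-01T00:00:00Z"

  permissions {
    read    = true
    write   = true
    delete  = false
    list    = true
    add     = true
    create  = true
    update  = false
    process = false
    tag     = false
    filter  = false
  }
}

# The same datastore as above, authenticating with the SAS token rather than the account key.
resource "azurerm_machine_learning_datastore_fileshare" "example" {
  name                    = "example-datastore"
  workspace_id            = azurerm_machine_learning_workspace.example.id
  storage_fileshare_id    = azurerm_storage_share.example.resource_manager_id
  shared_access_signature = data.azurerm_storage_account_sas.example.sas
}
```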
+ +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/language/resources/syntax#operation-timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Machine Learning DataStore. +* `read` - (Defaults to 5 minutes) Used when retrieving the Machine Learning DataStore. +* `update` - (Defaults to 30 minutes) Used when updating the Machine Learning DataStore. +* `delete` - (Defaults to 30 minutes) Used when deleting the Machine Learning DataStore. + +## Import + +Machine Learning DataStores can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_machine_learning_datastore_fileshare.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.MachineLearningServices/workspaces/mlw1/datastores/datastore1 +```
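With Terraform v1.5 and later, the same import can alternatively be declared in configuration via an `import` block. A brief sketch, using the placeholder ID from the command above:

```hcl
import {
  # Placeholder ID - replace with the ID of the existing Machine Learning DataStore.
  id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.MachineLearningServices/workspaces/mlw1/datastores/datastore1"
  to = azurerm_machine_learning_datastore_fileshare.example
}
```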