From 756c9ad9411a6ebb19d4ef6958f224a4018db9eb Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Mon, 9 Jan 2023 14:17:43 +0800 Subject: [PATCH 01/22] machine learning data store --- .../services/machinelearning/client/client.go | 6 + ...learning_datastore_blobstorage_resource.go | 271 ++++++++++++++++++ ...ing_datastore_blobstorage_resource_test.go | 238 +++++++++++++++ .../services/machinelearning/registration.go | 11 +- .../validate/datastore_name.go | 19 ++ .../validate/datastore_name_test.go | 66 +++++ vendor/modules.txt | 1 + ...arning_datastore_blogstorage.html.markdown | 126 ++++++++ 8 files changed, 733 insertions(+), 5 deletions(-) create mode 100644 internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go create mode 100644 internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go create mode 100644 internal/services/machinelearning/validate/datastore_name.go create mode 100644 internal/services/machinelearning/validate/datastore_name_test.go create mode 100644 website/docs/r/machine_learning_datastore_blogstorage.html.markdown diff --git a/internal/services/machinelearning/client/client.go b/internal/services/machinelearning/client/client.go index 7669fc0481b8..54ff4f0d97e8 100644 --- a/internal/services/machinelearning/client/client.go +++ b/internal/services/machinelearning/client/client.go @@ -1,6 +1,7 @@ package client import ( + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/machinelearningcomputes" "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/workspaces" "github.com/hashicorp/terraform-provider-azurerm/internal/common" @@ -9,6 +10,7 @@ import ( type Client struct { ComputeClient *machinelearningcomputes.MachineLearningComputesClient WorkspacesClient *workspaces.WorkspacesClient + DatastoreClient *datastore.DatastoreClient } func NewClient(o *common.ClientOptions) *Client { @@ -18,8 +20,12 @@ func NewClient(o *common.ClientOptions) *Client { WorkspacesClient := workspaces.NewWorkspacesClientWithBaseURI(o.ResourceManagerEndpoint) o.ConfigureClient(&WorkspacesClient.Client, o.ResourceManagerAuthorizer) + DatastoreClient := datastore.NewDatastoreClientWithBaseURI(o.ResourceManagerEndpoint) + o.ConfigureClient(&DatastoreClient.Client, o.ResourceManagerAuthorizer) + return &Client{ ComputeClient: &ComputeClient, WorkspacesClient: &WorkspacesClient, + DatastoreClient: &DatastoreClient, } } diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go new file mode 100644 index 000000000000..4223cfdfde12 --- /dev/null +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -0,0 +1,271 @@ +package machinelearning + +import ( + "fmt" + "time" + + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/workspaces" + "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" + "github.com/hashicorp/terraform-provider-azurerm/internal/clients" + "github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate" + 
"github.com/hashicorp/terraform-provider-azurerm/internal/tags" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/suppress" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" + "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" + "github.com/hashicorp/terraform-provider-azurerm/utils" +) + +func resourceMachineLearningDataStore() *pluginsdk.Resource { + resource := &pluginsdk.Resource{ + Create: resourceMachineLearningDataStoreCreateOrUpdate, + Read: resourceMachineLearningDataStoreRead, + Update: resourceMachineLearningDataStoreCreateOrUpdate, + Delete: resourceMachineLearningDataStoreDelete, + + Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + _, err := datastore.ParseDataStoreID(id) + return err + }), + + Timeouts: &pluginsdk.ResourceTimeout{ + Create: pluginsdk.DefaultTimeout(30 * time.Minute), + Read: pluginsdk.DefaultTimeout(5 * time.Minute), + Update: pluginsdk.DefaultTimeout(30 * time.Minute), + Delete: pluginsdk.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataStoreName, + }, + + "workspace_id": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.WorkspaceID, + }, + + "storage_account_name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "container_name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "description": { + Type: pluginsdk.TypeString, + Optional: true, + ForceNew: true, + }, + + "is_default": { + Type: pluginsdk.TypeBool, + Optional: true, + Default: false, + }, + + "service_data_auth_identity": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datastore.ServiceDataAccessAuthIdentityNone), + string(datastore.ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity), + string(datastore.ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity), + }, + false), + Default: string(datastore.ServiceDataAccessAuthIdentityNone), + }, + + "account_key": { + Type: pluginsdk.TypeString, + Optional: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + ExactlyOneOf: []string{"account_key", "shared_access_signature"}, + }, + + "shared_access_signature": { + Type: pluginsdk.TypeString, + Optional: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + AtLeastOneOf: []string{"account_key", "shared_access_signature"}, + }, + + "tags": commonschema.TagsForceNew(), + }, + } + return resource +} + +func resourceMachineLearningDataStoreCreateOrUpdate(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).MachineLearning.DatastoreClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string)) + if err != nil { + return err + } + + id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, id) + if err != nil { + if 
!response.WasNotFound(existing.HttpResponse) { + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) + } + } + if !response.WasNotFound(existing.HttpResponse) { + return tf.ImportAsExistsError("azurerm_machine_learning_datastore", id.ID()) + } + } + + datastoreRaw := datastore.DatastoreResource{ + Name: utils.String(d.Get("name").(string)), + Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), + } + + prop, err := expandBlobStorage(d) + if err != nil { + return fmt.Errorf("%+v", err) + } + datastoreRaw.Properties = prop + + _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) + if err != nil { + return fmt.Errorf("creating/updating %s: %+v", id, err) + } + + d.SetId(id.ID()) + return resourceMachineLearningDataStoreRead(d, meta) +} + +func resourceMachineLearningDataStoreRead(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).MachineLearning.DatastoreClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := datastore.ParseDataStoreID(d.Id()) + if err != nil { + return fmt.Errorf("parsing Machine Learning Data Store ID `%q`: %+v", d.Id(), err) + } + + resp, err := client.Get(ctx, *id) + if err != nil { + if response.WasNotFound(resp.HttpResponse) { + d.SetId("") + return nil + } + return fmt.Errorf("making Read request on Machine Learning Data Store %q (Resource Group %q): %+v", id.Name, id.ResourceGroupName, err) + } + + workspaceId := workspaces.NewWorkspaceID(subscriptionId, id.ResourceGroupName, id.WorkspaceName) + d.Set("name", resp.Model.Name) + d.Set("workspace_id", workspaceId.ID()) + return flattenBlobStorage(d, resp.Model.Properties.(datastore.AzureBlobDatastore)) +} + +func resourceMachineLearningDataStoreDelete(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).MachineLearning.DatastoreClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := datastore.ParseDataStoreID(d.Id()) + if err != nil { + return fmt.Errorf("parsing Machine Learning Workspace Date Store ID `%q`: %+v", d.Id(), err) + } + + if _, err := client.Delete(ctx, *id); err != nil { + return fmt.Errorf("deleting Machine Learning Workspace Date Strore %q (Resource Group %q): %+v", id.Name, id.ResourceGroupName, err) + } + + return nil +} + +func expandBlobStorage(d *pluginsdk.ResourceData) (*datastore.AzureBlobDatastore, error) { + storeProps := &datastore.AzureBlobDatastore{ + AccountName: utils.String(d.Get("storage_account_name").(string)), + ContainerName: utils.String(d.Get("container_name").(string)), + Description: utils.String(d.Get("description").(string)), + ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), + IsDefault: utils.Bool(d.Get("is_default").(bool)), + Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), + } + + accountKey := d.Get("account_key").(string) + if accountKey != "" { + storeProps.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeAccountKey), + "secrets": map[string]interface{}{ + "secretsType": "AccountKey", + "key": accountKey, + }, + } + } + + sasToken := d.Get("shared_access_signature").(string) + if sasToken != "" { + storeProps.Credentials = map[string]interface{}{ + "credentialsType": 
string(datastore.CredentialsTypeSas), + "secrets": map[string]interface{}{ + "secretsType": "Sas", + "sasToken": sasToken, + }, + } + } + + return storeProps, nil +} + +func flattenBlobStorage(d *pluginsdk.ResourceData, data datastore.AzureBlobDatastore) error { + d.Set("description", data.Description) + d.Set("is_default", data.IsDefault) + d.Set("service_data_auth_identity", string(*data.ServiceDataAccessAuthIdentity)) + d.Set("storage_account_name", *data.AccountName) + d.Set("container_name", *data.ContainerName) + return flattenAndSetTags(d, *data.Tags) +} + +func expandTags(tagsMap map[string]interface{}) map[string]string { + output := make(map[string]string, len(tagsMap)) + + for i, v := range tagsMap { + // Validate should have ignored this error already + value, _ := tags.TagValueToString(v) + output[i] = value + } + + return output +} + +func flattenAndSetTags(d *pluginsdk.ResourceData, tagMap map[string]string) error { + output := make(map[string]interface{}, len(tagMap)) + for i, v := range tagMap { + output[i] = v + } + + if err := d.Set("tags", output); err != nil { + return fmt.Errorf("setting `tags`: %s", err) + } + + return nil +} diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go new file mode 100644 index 000000000000..89ee271960f9 --- /dev/null +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go @@ -0,0 +1,238 @@ +package machinelearning_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" + "github.com/hashicorp/terraform-provider-azurerm/internal/clients" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/utils" +) + +type MachineLearningDataStore struct{} + +func TestAccMachineLearningDataStoreBlobStorage_accountKey(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_blobstorage", "test") + r := MachineLearningDataStore{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blobStorageAccountKey(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("account_key"), + }) +} + +func TestAccMachineLearningDataStoreBlobStorage_sasToken(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_blobstorage", "test") + r := MachineLearningDataStore{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blobStorageSas(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("shared_access_signature"), + }) +} + +func TestAccMachineLearningDataStoreBlobStorage_Update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_blobstorage", "test") + r := MachineLearningDataStore{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blobStorageAccountKey(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("account_key"), + { + Config: r.blobStorageSas(data), + Check: 
acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("account_key", "shared_access_signature"), + }) +} + +func (r MachineLearningDataStore) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { + dataStoreClient := client.MachineLearning.DatastoreClient + id, err := datastore.ParseDataStoreID(state.ID) + if err != nil { + return nil, err + } + + resp, err := dataStoreClient.Get(ctx, *id) + if err != nil { + if response.WasNotFound(resp.HttpResponse) { + return utils.Bool(false), nil + } + return nil, fmt.Errorf("retrieving Machine Learning Data Store %q: %+v", state.ID, err) + } + + return utils.Bool(resp.Model.Properties != nil), nil +} + +func (r MachineLearningDataStore) blobStorageAccountKey(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_container" "test" { + name = "acctestcontainer%[2]d" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +resource "azurerm_machine_learning_datastore_blobstorage" "test" { + name = "accdatastore%[2]d" + workspace_id = azurerm_machine_learning_workspace.test.id + storage_account_name = azurerm_storage_account.test.name + container_name = azurerm_storage_container.test.name + account_key = azurerm_storage_account.test.primary_access_key +} +`, template, data.RandomInteger) +} + +func (r MachineLearningDataStore) blobStorageSas(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_container" "test" { + name = "acctestcontainer%[2]d" + storage_account_name = azurerm_storage_account.test.name + container_access_type = "private" +} + +data "azurerm_storage_account_sas" "test" { + connection_string = azurerm_storage_account.test.primary_connection_string + https_only = true + signed_version = "2019-10-10" + + resource_types { + service = true + container = true + object = true + } + + services { + blob = true + queue = false + table = false + file = true + } + + start = "2022-01-01T06:17:07Z" + expiry = "2024-12-23T06:17:07Z" + + permissions { + read = true + write = true + delete = false + list = false + add = true + create = true + update = false + process = false + tag = false + filter = false + } +} + +resource "azurerm_machine_learning_datastore_blobstorage" "test" { + name = "accdatastore%[2]d" + workspace_id = azurerm_machine_learning_workspace.test.id + storage_account_name = azurerm_storage_account.test.name + container_name = azurerm_storage_container.test.name + shared_access_signature = data.azurerm_storage_account_sas.test.sas +} + + +`, template, data.RandomInteger) +} + +func (r MachineLearningDataStore) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = false + purge_soft_deleted_keys_on_destroy = false + } + } +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-ml-%[1]d" + location = "%[2]s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestai-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_key_vault" "test" { + name = "acctestvault%[3]s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = 
data.azurerm_client_config.current.tenant_id + + sku_name = "standard" + + purge_protection_enabled = true +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "Create", + "Get", + "Delete", + "Purge", + ] +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%[4]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_machine_learning_workspace" "test" { + name = "acctest-MLW-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_insights_id = azurerm_application_insights.test.id + key_vault_id = azurerm_key_vault.test.id + storage_account_id = azurerm_storage_account.test.id + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomIntOfLength(15)) +} diff --git a/internal/services/machinelearning/registration.go b/internal/services/machinelearning/registration.go index 8167cb77c35d..91e02c46e84d 100644 --- a/internal/services/machinelearning/registration.go +++ b/internal/services/machinelearning/registration.go @@ -34,10 +34,11 @@ func (r Registration) SupportedDataSources() map[string]*pluginsdk.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*pluginsdk.Resource { return map[string]*pluginsdk.Resource{ - "azurerm_machine_learning_compute_cluster": resourceComputeCluster(), - "azurerm_machine_learning_compute_instance": resourceComputeInstance(), - "azurerm_machine_learning_inference_cluster": resourceAksInferenceCluster(), - "azurerm_machine_learning_synapse_spark": resourceSynapseSpark(), - "azurerm_machine_learning_workspace": resourceMachineLearningWorkspace(), + "azurerm_machine_learning_compute_cluster": resourceComputeCluster(), + "azurerm_machine_learning_compute_instance": resourceComputeInstance(), + "azurerm_machine_learning_inference_cluster": resourceAksInferenceCluster(), + "azurerm_machine_learning_synapse_spark": resourceSynapseSpark(), + "azurerm_machine_learning_workspace": resourceMachineLearningWorkspace(), + "azurerm_machine_learning_datastore_blobstorage": resourceMachineLearningDataStore(), } } diff --git a/internal/services/machinelearning/validate/datastore_name.go b/internal/services/machinelearning/validate/datastore_name.go new file mode 100644 index 000000000000..3ae0c87a6b85 --- /dev/null +++ b/internal/services/machinelearning/validate/datastore_name.go @@ -0,0 +1,19 @@ +package validate + +import ( + "fmt" + "regexp" +) + +func DataStoreName(i interface{}, k string) (warnings []string, errors []error) { + v, ok := i.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %q to be string", k)) + return + } + + if matched := regexp.MustCompile(`^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$`).Match([]byte(v)); !matched { + errors = append(errors, fmt.Errorf("%s must be between 1 and 255 characters, and may only include alphanumeric characters and '-'.", k)) + } + return +} diff --git a/internal/services/machinelearning/validate/datastore_name_test.go b/internal/services/machinelearning/validate/datastore_name_test.go new file mode 100644 index 000000000000..853b1fb09f8e --- 
/dev/null +++ b/internal/services/machinelearning/validate/datastore_name_test.go @@ -0,0 +1,66 @@ +package validate + +import "testing" + +func TestDataStoreName(t *testing.T) { + testData := []struct { + input string + expected bool + }{ + { + // empty + input: "", + expected: false, + }, + { + // basic example + input: "hello", + expected: true, + }, + { + // cannot start with a hyphen + input: "-hello", + expected: false, + }, + { + // can end with a hyphen + input: "hello-", + expected: true, + }, + { + // cannot contain other special symbols other than hyphens + input: "hello.world", + expected: false, + }, + { + // hyphen in the middle + input: "hello-world", + expected: true, + }, + { + // 2 chars + input: "ab", + expected: true, + }, + { + // 255 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopa", + expected: true, + }, + { + // 256 chars + input: "abcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqrstuvwxyzabcdefghabcdefghijklmnopqa", + expected: false, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q", v.input) + + _, errors := DataStoreName(v.input, "name") + actual := len(errors) == 0 + if v.expected != actual { + t.Fatalf("Expected %t but got %t", v.expected, actual) + } + } +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 75bb8d9d41f3..0612a644acb4 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -291,6 +291,7 @@ github.com/hashicorp/go-azure-sdk/resource-manager/kusto/2022-07-07/clusters github.com/hashicorp/go-azure-sdk/resource-manager/labservices/2022-08-01/labplan github.com/hashicorp/go-azure-sdk/resource-manager/loadtestservice/2021-12-01-preview github.com/hashicorp/go-azure-sdk/resource-manager/loadtestservice/2021-12-01-preview/loadtests +github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/machinelearningcomputes github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/workspaces github.com/hashicorp/go-azure-sdk/resource-manager/maintenance/2021-05-01/configurationassignments diff --git a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown new file mode 100644 index 000000000000..34699942542c --- /dev/null +++ b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "Machine Learning" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_machine_learning_datastore_blobstorage" +description: |- + Manages a Machine Learning Blob Storage DataStore. +--- + +# azurerm_machine_learning_datastore_blobstorage + +Manages a Machine Learning Blob Storage DataStore. 
+ +## Example Usage with Azure Blob + +```hcl +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_application_insights" "example" { + name = "workspace-example-ai" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + application_type = "web" +} + +resource "azurerm_key_vault" "example" { + name = "workspaceexamplekeyvault" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" +} + +resource "azurerm_storage_account" "example" { + name = "workspacestorageaccount" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_machine_learning_workspace" "example" { + name = "example-workspace" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + application_insights_id = azurerm_application_insights.example.id + key_vault_id = azurerm_key_vault.example.id + storage_account_id = azurerm_storage_account.example.id + + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_storage_container" "example" { + name = "example-container" + storage_account_name = azurerm_storage_account.example.name + container_access_type = "private" +} + +resource "azurerm_machine_learning_datastore_blobstorage" "example" { + name = "example-datastore" + workspace_id = azurerm_machine_learning_workspace.example.id + storage_account_name = azurerm_storage_account.example.name + container_name = azurerm_storage_container.example.name + account_key = azurerm_storage_account.example.primary_access_key +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name of the Machine Learning DataStore. Changing this forces a new Machine Learning DataStore to be created. + +* `workspace_id` - (Required) The ID of the machine learning workspace. Changing this forces a new Machine Learning DataStore to be created. + +--- +* `storage_account_name` - (Optional) The name of the storage account. Changing this forces a new Machine Learning DataStore to be created. + +* `container_name` - (Optional) The name of the storage account container. Changing this forces a new Machine Learning DataStore to be created. + +* `account_key` - (Optional) The access key of the storage account. Conflicts with `shared_access_signature`. + +* `shared_access_signature` - (Optional) The shared access signature of the storage account. Conflicts with `account_key`. + +* `description` - (Optional) Text used to describe the asset. Changing this forces a new Machine Learning DataStore to be created. + +* `is_default` - (Optional) A bool indicate if datastore is the workspace default datastore. Defaults to `false`. + +~> **Note:** `is_default` can only be set to `true` on update. + +* `service_data_auth_identity` - (Optional) Indicates which identity to use to authenticate service data access to customer's storage. Defaults to `None`. Possible values are `None`, `WorkspaceSystemAssignedIdentity` and `WorkspaceUserAssignedIdentity`. + +* `tags` - (Optional) A mapping of tags which should be assigned to the Machine Learning DataStore. 
Changing this forces a new Machine Learning DataStore to be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Machine Learning DataStore. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/language/resources/syntax#operation-timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Machine Learning DataStore. +* `read` - (Defaults to 5 minutes) Used when retrieving the Machine Learning DataStore. +* `update` - (Defaults to 30 minutes) Used when updating the Machine Learning DataStore. +* `delete` - (Defaults to 30 minutes) Used when deleting the Machine Learning DataStore. + +## Import + +Machine Learning DataStores can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_machine_learning_datastore_blobstorage.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.MachineLearningServices/workspaces/mlw1/datastores/datastore1 +``` From 0513273f6fb9209f349376cdacd28c884a0f1fae Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Mon, 9 Jan 2023 14:32:22 +0800 Subject: [PATCH 02/22] vendor files --- .../2022-05-01/datastore/README.md | 106 ++++++++ .../2022-05-01/datastore/client.go | 18 ++ .../2022-05-01/datastore/constants.go | 142 ++++++++++ .../2022-05-01/datastore/id_datastore.go | 137 ++++++++++ .../2022-05-01/datastore/id_workspace.go | 124 +++++++++ .../method_createorupdate_autorest.go | 98 +++++++ .../datastore/method_delete_autorest.go | 66 +++++ .../datastore/method_get_autorest.go | 68 +++++ .../datastore/method_list_autorest.go | 245 ++++++++++++++++++ .../datastore/method_listsecrets_autorest.go | 77 ++++++ .../model_accountkeydatastorecredentials.go | 60 +++++ .../model_accountkeydatastoresecrets.go | 41 +++ .../datastore/model_azureblobdatastore.go | 84 ++++++ .../model_azuredatalakegen1datastore.go | 78 ++++++ .../model_azuredatalakegen2datastore.go | 84 ++++++ .../datastore/model_azurefiledatastore.go | 84 ++++++ .../model_certificatedatastorecredentials.go | 76 ++++++ .../model_certificatedatastoresecrets.go | 41 +++ .../2022-05-01/datastore/model_datastore.go | 72 +++++ .../datastore/model_datastorecredentials.go | 80 ++++++ .../datastore/model_datastoreresource.go | 48 ++++ .../datastore/model_datastoresecrets.go | 72 +++++ .../model_nonedatastorecredentials.go | 40 +++ .../model_sasdatastorecredentials.go | 60 +++++ .../datastore/model_sasdatastoresecrets.go | 41 +++ ...el_serviceprincipaldatastorecredentials.go | 74 ++++++ .../model_serviceprincipaldatastoresecrets.go | 41 +++ .../2022-05-01/datastore/predicates.go | 24 ++ .../2022-05-01/datastore/version.go | 12 + 29 files changed, 2193 insertions(+) create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/README.md create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/client.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/constants.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/id_datastore.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/id_workspace.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_createorupdate_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_delete_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_get_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_list_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_listsecrets_autorest.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_accountkeydatastorecredentials.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_accountkeydatastoresecrets.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azureblobdatastore.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azuredatalakegen1datastore.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azuredatalakegen2datastore.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azurefiledatastore.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_certificatedatastorecredentials.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_certificatedatastoresecrets.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastore.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastorecredentials.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastoreresource.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastoresecrets.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_nonedatastorecredentials.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_sasdatastorecredentials.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_sasdatastoresecrets.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_serviceprincipaldatastorecredentials.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_serviceprincipaldatastoresecrets.go create mode 100644 vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/predicates.go create mode 100644 
vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/version.go diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/README.md b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/README.md new file mode 100644 index 000000000000..c10a4f3c1279 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/README.md @@ -0,0 +1,106 @@ + +## `github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore` Documentation + +The `datastore` SDK allows for interaction with the Azure Resource Manager Service `machinelearningservices` (API Version `2022-05-01`). + +This readme covers example usages, but further information on [using this SDK can be found in the project root](https://github.com/hashicorp/go-azure-sdk/tree/main/docs). + +### Import Path + +```go +import "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" +``` + + +### Client Initialization + +```go +client := datastore.NewDatastoreClientWithBaseURI("https://management.azure.com") +client.Client.Authorizer = authorizer +``` + + +### Example Usage: `DatastoreClient.CreateOrUpdate` + +```go +ctx := context.TODO() +id := datastore.NewDataStoreID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "nameValue") + +payload := datastore.DatastoreResource{ + // ... +} + + +read, err := client.CreateOrUpdate(ctx, id, payload, datastore.DefaultCreateOrUpdateOperationOptions()) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `DatastoreClient.Delete` + +```go +ctx := context.TODO() +id := datastore.NewDataStoreID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "nameValue") + +read, err := client.Delete(ctx, id) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `DatastoreClient.Get` + +```go +ctx := context.TODO() +id := datastore.NewDataStoreID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "nameValue") + +read, err := client.Get(ctx, id) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` + + +### Example Usage: `DatastoreClient.List` + +```go +ctx := context.TODO() +id := datastore.NewWorkspaceID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue") + +// alternatively `client.List(ctx, id, datastore.DefaultListOperationOptions())` can be used to do batched pagination +items, err := client.ListComplete(ctx, id, datastore.DefaultListOperationOptions()) +if err != nil { + // handle the error +} +for _, item := range items { + // do something +} +``` + + +### Example Usage: `DatastoreClient.ListSecrets` + +```go +ctx := context.TODO() +id := datastore.NewDataStoreID("12345678-1234-9876-4563-123456789012", "example-resource-group", "workspaceValue", "nameValue") + +read, err := client.ListSecrets(ctx, id) +if err != nil { + // handle the error +} +if model := read.Model; model != nil { + // do something with the model/response object +} +``` diff --git 
a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/client.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/client.go new file mode 100644 index 000000000000..60a84514389b --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/client.go @@ -0,0 +1,18 @@ +package datastore + +import "github.com/Azure/go-autorest/autorest" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DatastoreClient struct { + Client autorest.Client + baseUri string +} + +func NewDatastoreClientWithBaseURI(endpoint string) DatastoreClient { + return DatastoreClient{ + Client: autorest.NewClientWithUserAgent(userAgent()), + baseUri: endpoint, + } +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/constants.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/constants.go new file mode 100644 index 000000000000..791a12fd4947 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/constants.go @@ -0,0 +1,142 @@ +package datastore + +import "strings" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type CredentialsType string + +const ( + CredentialsTypeAccountKey CredentialsType = "AccountKey" + CredentialsTypeCertificate CredentialsType = "Certificate" + CredentialsTypeNone CredentialsType = "None" + CredentialsTypeSas CredentialsType = "Sas" + CredentialsTypeServicePrincipal CredentialsType = "ServicePrincipal" +) + +func PossibleValuesForCredentialsType() []string { + return []string{ + string(CredentialsTypeAccountKey), + string(CredentialsTypeCertificate), + string(CredentialsTypeNone), + string(CredentialsTypeSas), + string(CredentialsTypeServicePrincipal), + } +} + +func parseCredentialsType(input string) (*CredentialsType, error) { + vals := map[string]CredentialsType{ + "accountkey": CredentialsTypeAccountKey, + "certificate": CredentialsTypeCertificate, + "none": CredentialsTypeNone, + "sas": CredentialsTypeSas, + "serviceprincipal": CredentialsTypeServicePrincipal, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := CredentialsType(input) + return &out, nil +} + +type DatastoreType string + +const ( + DatastoreTypeAzureBlob DatastoreType = "AzureBlob" + DatastoreTypeAzureDataLakeGenOne DatastoreType = "AzureDataLakeGen1" + DatastoreTypeAzureDataLakeGenTwo DatastoreType = "AzureDataLakeGen2" + DatastoreTypeAzureFile DatastoreType = "AzureFile" +) + +func PossibleValuesForDatastoreType() []string { + return []string{ + string(DatastoreTypeAzureBlob), + string(DatastoreTypeAzureDataLakeGenOne), + string(DatastoreTypeAzureDataLakeGenTwo), + string(DatastoreTypeAzureFile), + } +} + +func parseDatastoreType(input string) (*DatastoreType, error) { + vals := map[string]DatastoreType{ + "azureblob": DatastoreTypeAzureBlob, + "azuredatalakegen1": DatastoreTypeAzureDataLakeGenOne, + "azuredatalakegen2": DatastoreTypeAzureDataLakeGenTwo, + "azurefile": DatastoreTypeAzureFile, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return 
&v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := DatastoreType(input) + return &out, nil +} + +type SecretsType string + +const ( + SecretsTypeAccountKey SecretsType = "AccountKey" + SecretsTypeCertificate SecretsType = "Certificate" + SecretsTypeSas SecretsType = "Sas" + SecretsTypeServicePrincipal SecretsType = "ServicePrincipal" +) + +func PossibleValuesForSecretsType() []string { + return []string{ + string(SecretsTypeAccountKey), + string(SecretsTypeCertificate), + string(SecretsTypeSas), + string(SecretsTypeServicePrincipal), + } +} + +func parseSecretsType(input string) (*SecretsType, error) { + vals := map[string]SecretsType{ + "accountkey": SecretsTypeAccountKey, + "certificate": SecretsTypeCertificate, + "sas": SecretsTypeSas, + "serviceprincipal": SecretsTypeServicePrincipal, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := SecretsType(input) + return &out, nil +} + +type ServiceDataAccessAuthIdentity string + +const ( + ServiceDataAccessAuthIdentityNone ServiceDataAccessAuthIdentity = "None" + ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity ServiceDataAccessAuthIdentity = "WorkspaceSystemAssignedIdentity" + ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity ServiceDataAccessAuthIdentity = "WorkspaceUserAssignedIdentity" +) + +func PossibleValuesForServiceDataAccessAuthIdentity() []string { + return []string{ + string(ServiceDataAccessAuthIdentityNone), + string(ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity), + string(ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity), + } +} + +func parseServiceDataAccessAuthIdentity(input string) (*ServiceDataAccessAuthIdentity, error) { + vals := map[string]ServiceDataAccessAuthIdentity{ + "none": ServiceDataAccessAuthIdentityNone, + "workspacesystemassignedidentity": ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity, + "workspaceuserassignedidentity": ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity, + } + if v, ok := vals[strings.ToLower(input)]; ok { + return &v, nil + } + + // otherwise presume it's an undefined value and best-effort it + out := ServiceDataAccessAuthIdentity(input) + return &out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/id_datastore.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/id_datastore.go new file mode 100644 index 000000000000..9ee9fa38b166 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/id_datastore.go @@ -0,0 +1,137 @@ +package datastore + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = DataStoreId{} + +// DataStoreId is a struct representing the Resource ID for a Data Store +type DataStoreId struct { + SubscriptionId string + ResourceGroupName string + WorkspaceName string + Name string +} + +// NewDataStoreID returns a new DataStoreId struct +func NewDataStoreID(subscriptionId string, resourceGroupName string, workspaceName string, name string) DataStoreId { + return DataStoreId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + WorkspaceName: workspaceName, + Name: name, + } +} + +// ParseDataStoreID parses 'input' into a DataStoreId +func ParseDataStoreID(input string) 
(*DataStoreId, error) { + parser := resourceids.NewParserFromResourceIdType(DataStoreId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := DataStoreId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.WorkspaceName, ok = parsed.Parsed["workspaceName"]; !ok { + return nil, fmt.Errorf("the segment 'workspaceName' was not found in the resource id %q", input) + } + + if id.Name, ok = parsed.Parsed["name"]; !ok { + return nil, fmt.Errorf("the segment 'name' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseDataStoreIDInsensitively parses 'input' case-insensitively into a DataStoreId +// note: this method should only be used for API response data and not user input +func ParseDataStoreIDInsensitively(input string) (*DataStoreId, error) { + parser := resourceids.NewParserFromResourceIdType(DataStoreId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := DataStoreId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.WorkspaceName, ok = parsed.Parsed["workspaceName"]; !ok { + return nil, fmt.Errorf("the segment 'workspaceName' was not found in the resource id %q", input) + } + + if id.Name, ok = parsed.Parsed["name"]; !ok { + return nil, fmt.Errorf("the segment 'name' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateDataStoreID checks that 'input' can be parsed as a Data Store ID +func ValidateDataStoreID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseDataStoreID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Data Store ID +func (id DataStoreId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.MachineLearningServices/workspaces/%s/dataStores/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.WorkspaceName, id.Name) +} + +// Segments returns a slice of Resource ID Segments which comprise this Data Store ID +func (id DataStoreId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftMachineLearningServices", "Microsoft.MachineLearningServices", 
"Microsoft.MachineLearningServices"), + resourceids.StaticSegment("staticWorkspaces", "workspaces", "workspaces"), + resourceids.UserSpecifiedSegment("workspaceName", "workspaceValue"), + resourceids.StaticSegment("staticDataStores", "dataStores", "dataStores"), + resourceids.UserSpecifiedSegment("name", "nameValue"), + } +} + +// String returns a human-readable description of this Data Store ID +func (id DataStoreId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Workspace Name: %q", id.WorkspaceName), + fmt.Sprintf("Name: %q", id.Name), + } + return fmt.Sprintf("Data Store (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/id_workspace.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/id_workspace.go new file mode 100644 index 000000000000..f81c21e0c9ec --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/id_workspace.go @@ -0,0 +1,124 @@ +package datastore + +import ( + "fmt" + "strings" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/resourceids" +) + +var _ resourceids.ResourceId = WorkspaceId{} + +// WorkspaceId is a struct representing the Resource ID for a Workspace +type WorkspaceId struct { + SubscriptionId string + ResourceGroupName string + WorkspaceName string +} + +// NewWorkspaceID returns a new WorkspaceId struct +func NewWorkspaceID(subscriptionId string, resourceGroupName string, workspaceName string) WorkspaceId { + return WorkspaceId{ + SubscriptionId: subscriptionId, + ResourceGroupName: resourceGroupName, + WorkspaceName: workspaceName, + } +} + +// ParseWorkspaceID parses 'input' into a WorkspaceId +func ParseWorkspaceID(input string) (*WorkspaceId, error) { + parser := resourceids.NewParserFromResourceIdType(WorkspaceId{}) + parsed, err := parser.Parse(input, false) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := WorkspaceId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not found in the resource id %q", input) + } + + if id.WorkspaceName, ok = parsed.Parsed["workspaceName"]; !ok { + return nil, fmt.Errorf("the segment 'workspaceName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ParseWorkspaceIDInsensitively parses 'input' case-insensitively into a WorkspaceId +// note: this method should only be used for API response data and not user input +func ParseWorkspaceIDInsensitively(input string) (*WorkspaceId, error) { + parser := resourceids.NewParserFromResourceIdType(WorkspaceId{}) + parsed, err := parser.Parse(input, true) + if err != nil { + return nil, fmt.Errorf("parsing %q: %+v", input, err) + } + + var ok bool + id := WorkspaceId{} + + if id.SubscriptionId, ok = parsed.Parsed["subscriptionId"]; !ok { + return nil, fmt.Errorf("the segment 'subscriptionId' was not found in the resource id %q", input) + } + + if id.ResourceGroupName, ok = parsed.Parsed["resourceGroupName"]; !ok { + return nil, fmt.Errorf("the segment 'resourceGroupName' was not 
found in the resource id %q", input) + } + + if id.WorkspaceName, ok = parsed.Parsed["workspaceName"]; !ok { + return nil, fmt.Errorf("the segment 'workspaceName' was not found in the resource id %q", input) + } + + return &id, nil +} + +// ValidateWorkspaceID checks that 'input' can be parsed as a Workspace ID +func ValidateWorkspaceID(input interface{}, key string) (warnings []string, errors []error) { + v, ok := input.(string) + if !ok { + errors = append(errors, fmt.Errorf("expected %q to be a string", key)) + return + } + + if _, err := ParseWorkspaceID(v); err != nil { + errors = append(errors, err) + } + + return +} + +// ID returns the formatted Workspace ID +func (id WorkspaceId) ID() string { + fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.MachineLearningServices/workspaces/%s" + return fmt.Sprintf(fmtString, id.SubscriptionId, id.ResourceGroupName, id.WorkspaceName) +} + +// Segments returns a slice of Resource ID Segments which comprise this Workspace ID +func (id WorkspaceId) Segments() []resourceids.Segment { + return []resourceids.Segment{ + resourceids.StaticSegment("staticSubscriptions", "subscriptions", "subscriptions"), + resourceids.SubscriptionIdSegment("subscriptionId", "12345678-1234-9876-4563-123456789012"), + resourceids.StaticSegment("staticResourceGroups", "resourceGroups", "resourceGroups"), + resourceids.ResourceGroupSegment("resourceGroupName", "example-resource-group"), + resourceids.StaticSegment("staticProviders", "providers", "providers"), + resourceids.ResourceProviderSegment("staticMicrosoftMachineLearningServices", "Microsoft.MachineLearningServices", "Microsoft.MachineLearningServices"), + resourceids.StaticSegment("staticWorkspaces", "workspaces", "workspaces"), + resourceids.UserSpecifiedSegment("workspaceName", "workspaceValue"), + } +} + +// String returns a human-readable description of this Workspace ID +func (id WorkspaceId) String() string { + components := []string{ + fmt.Sprintf("Subscription: %q", id.SubscriptionId), + fmt.Sprintf("Resource Group Name: %q", id.ResourceGroupName), + fmt.Sprintf("Workspace Name: %q", id.WorkspaceName), + } + return fmt.Sprintf("Workspace (%s)", strings.Join(components, "\n")) +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_createorupdate_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_createorupdate_autorest.go new file mode 100644 index 000000000000..58eeec74bcad --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_createorupdate_autorest.go @@ -0,0 +1,98 @@ +package datastore + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type CreateOrUpdateOperationResponse struct { + HttpResponse *http.Response + Model *DatastoreResource +} + +type CreateOrUpdateOperationOptions struct { + SkipValidation *bool +} + +func DefaultCreateOrUpdateOperationOptions() CreateOrUpdateOperationOptions { + return CreateOrUpdateOperationOptions{} +} + +func (o CreateOrUpdateOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +func (o CreateOrUpdateOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + if o.SkipValidation != nil { + out["skipValidation"] = *o.SkipValidation + } + + return out +} + +// CreateOrUpdate ... +func (c DatastoreClient) CreateOrUpdate(ctx context.Context, id DataStoreId, input DatastoreResource, options CreateOrUpdateOperationOptions) (result CreateOrUpdateOperationResponse, err error) { + req, err := c.preparerForCreateOrUpdate(ctx, id, input, options) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "CreateOrUpdate", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "CreateOrUpdate", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForCreateOrUpdate(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "CreateOrUpdate", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForCreateOrUpdate prepares the CreateOrUpdate request. +func (c DatastoreClient) preparerForCreateOrUpdate(ctx context.Context, id DataStoreId, input DatastoreResource, options CreateOrUpdateOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(id.ID()), + autorest.WithJSON(input), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForCreateOrUpdate handles the response to the CreateOrUpdate request. The method always +// closes the http.Response Body. 
+func (c DatastoreClient) responderForCreateOrUpdate(resp *http.Response) (result CreateOrUpdateOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusCreated, http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_delete_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_delete_autorest.go new file mode 100644 index 000000000000..83997bda34da --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_delete_autorest.go @@ -0,0 +1,66 @@ +package datastore + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DeleteOperationResponse struct { + HttpResponse *http.Response +} + +// Delete ... +func (c DatastoreClient) Delete(ctx context.Context, id DataStoreId) (result DeleteOperationResponse, err error) { + req, err := c.preparerForDelete(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "Delete", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "Delete", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForDelete(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "Delete", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForDelete prepares the Delete request. +func (c DatastoreClient) preparerForDelete(ctx context.Context, id DataStoreId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsDelete(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForDelete handles the response to the Delete request. The method always +// closes the http.Response Body. 
+func (c DatastoreClient) responderForDelete(resp *http.Response) (result DeleteOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusNoContent, http.StatusOK), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_get_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_get_autorest.go new file mode 100644 index 000000000000..a1da4291afc5 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_get_autorest.go @@ -0,0 +1,68 @@ +package datastore + +import ( + "context" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type GetOperationResponse struct { + HttpResponse *http.Response + Model *DatastoreResource +} + +// Get ... +func (c DatastoreClient) Get(ctx context.Context, id DataStoreId) (result GetOperationResponse, err error) { + req, err := c.preparerForGet(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "Get", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "Get", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForGet(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "Get", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForGet prepares the Get request. +func (c DatastoreClient) preparerForGet(ctx context.Context, id DataStoreId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(id.ID()), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForGet handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (c DatastoreClient) responderForGet(resp *http.Response) (result GetOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result.Model), + autorest.ByClosing()) + result.HttpResponse = resp + + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_list_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_list_autorest.go new file mode 100644 index 000000000000..57617d9d22d3 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_list_autorest.go @@ -0,0 +1,245 @@ +package datastore + +import ( + "context" + "fmt" + "net/http" + "net/url" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ListOperationResponse struct { + HttpResponse *http.Response + Model *[]DatastoreResource + + nextLink *string + nextPageFunc func(ctx context.Context, nextLink string) (ListOperationResponse, error) +} + +type ListCompleteResult struct { + Items []DatastoreResource +} + +func (r ListOperationResponse) HasMore() bool { + return r.nextLink != nil +} + +func (r ListOperationResponse) LoadMore(ctx context.Context) (resp ListOperationResponse, err error) { + if !r.HasMore() { + err = fmt.Errorf("no more pages returned") + return + } + return r.nextPageFunc(ctx, *r.nextLink) +} + +type ListOperationOptions struct { + Count *int64 + IsDefault *bool + Names *[]string + OrderBy *string + OrderByAsc *bool + SearchText *string + Skip *string +} + +func DefaultListOperationOptions() ListOperationOptions { + return ListOperationOptions{} +} + +func (o ListOperationOptions) toHeaders() map[string]interface{} { + out := make(map[string]interface{}) + + return out +} + +func (o ListOperationOptions) toQueryString() map[string]interface{} { + out := make(map[string]interface{}) + + if o.Count != nil { + out["count"] = *o.Count + } + + if o.IsDefault != nil { + out["isDefault"] = *o.IsDefault + } + + if o.Names != nil { + out["names"] = *o.Names + } + + if o.OrderBy != nil { + out["orderBy"] = *o.OrderBy + } + + if o.OrderByAsc != nil { + out["orderByAsc"] = *o.OrderByAsc + } + + if o.SearchText != nil { + out["searchText"] = *o.SearchText + } + + if o.Skip != nil { + out["$skip"] = *o.Skip + } + + return out +} + +// List ... +func (c DatastoreClient) List(ctx context.Context, id WorkspaceId, options ListOperationOptions) (resp ListOperationResponse, err error) { + req, err := c.preparerForList(ctx, id, options) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "List", nil, "Failure preparing request") + return + } + + resp.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "List", resp.HttpResponse, "Failure sending request") + return + } + + resp, err = c.responderForList(resp.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "List", resp.HttpResponse, "Failure responding to request") + return + } + return +} + +// preparerForList prepares the List request. 
+func (c DatastoreClient) preparerForList(ctx context.Context, id WorkspaceId, options ListOperationOptions) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + for k, v := range options.toQueryString() { + queryParameters[k] = autorest.Encode("query", v) + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithHeaders(options.toHeaders()), + autorest.WithPath(fmt.Sprintf("%s/dataStores", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// preparerForListWithNextLink prepares the List request with the given nextLink token. +func (c DatastoreClient) preparerForListWithNextLink(ctx context.Context, nextLink string) (*http.Request, error) { + uri, err := url.Parse(nextLink) + if err != nil { + return nil, fmt.Errorf("parsing nextLink %q: %+v", nextLink, err) + } + queryParameters := map[string]interface{}{} + for k, v := range uri.Query() { + if len(v) == 0 { + continue + } + val := v[0] + val = autorest.Encode("query", val) + queryParameters[k] = val + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsGet(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(uri.Path), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForList handles the response to the List request. The method always +// closes the http.Response Body. +func (c DatastoreClient) responderForList(resp *http.Response) (result ListOperationResponse, err error) { + type page struct { + Values []DatastoreResource `json:"value"` + NextLink *string `json:"nextLink"` + } + var respObj page + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&respObj), + autorest.ByClosing()) + result.HttpResponse = resp + result.Model = &respObj.Values + result.nextLink = respObj.NextLink + if respObj.NextLink != nil { + result.nextPageFunc = func(ctx context.Context, nextLink string) (result ListOperationResponse, err error) { + req, err := c.preparerForListWithNextLink(ctx, nextLink) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "List", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "List", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForList(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "List", result.HttpResponse, "Failure responding to request") + return + } + + return + } + } + return +} + +// ListComplete retrieves all of the results into a single object +func (c DatastoreClient) ListComplete(ctx context.Context, id WorkspaceId, options ListOperationOptions) (ListCompleteResult, error) { + return c.ListCompleteMatchingPredicate(ctx, id, options, DatastoreResourceOperationPredicate{}) +} + +// ListCompleteMatchingPredicate retrieves all of the results and then applied the predicate +func (c DatastoreClient) ListCompleteMatchingPredicate(ctx context.Context, id WorkspaceId, options ListOperationOptions, predicate DatastoreResourceOperationPredicate) (resp 
ListCompleteResult, err error) { + items := make([]DatastoreResource, 0) + + page, err := c.List(ctx, id, options) + if err != nil { + err = fmt.Errorf("loading the initial page: %+v", err) + return + } + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + + for page.HasMore() { + page, err = page.LoadMore(ctx) + if err != nil { + err = fmt.Errorf("loading the next page: %+v", err) + return + } + + if page.Model != nil { + for _, v := range *page.Model { + if predicate.Matches(v) { + items = append(items, v) + } + } + } + } + + out := ListCompleteResult{ + Items: items, + } + return out, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_listsecrets_autorest.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_listsecrets_autorest.go new file mode 100644 index 000000000000..e75139f2c3c4 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/method_listsecrets_autorest.go @@ -0,0 +1,77 @@ +package datastore + +import ( + "context" + "fmt" + "io/ioutil" + "net/http" + + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type ListSecretsOperationResponse struct { + HttpResponse *http.Response + Model *DatastoreSecrets +} + +// ListSecrets ... +func (c DatastoreClient) ListSecrets(ctx context.Context, id DataStoreId) (result ListSecretsOperationResponse, err error) { + req, err := c.preparerForListSecrets(ctx, id) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "ListSecrets", nil, "Failure preparing request") + return + } + + result.HttpResponse, err = c.Client.Send(req, azure.DoRetryWithRegistration(c.Client)) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "ListSecrets", result.HttpResponse, "Failure sending request") + return + } + + result, err = c.responderForListSecrets(result.HttpResponse) + if err != nil { + err = autorest.NewErrorWithError(err, "datastore.DatastoreClient", "ListSecrets", result.HttpResponse, "Failure responding to request") + return + } + + return +} + +// preparerForListSecrets prepares the ListSecrets request. +func (c DatastoreClient) preparerForListSecrets(ctx context.Context, id DataStoreId) (*http.Request, error) { + queryParameters := map[string]interface{}{ + "api-version": defaultApiVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(c.baseUri), + autorest.WithPath(fmt.Sprintf("%s/listSecrets", id.ID())), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// responderForListSecrets handles the response to the ListSecrets request. The method always +// closes the http.Response Body. 
+func (c DatastoreClient) responderForListSecrets(resp *http.Response) (result ListSecretsOperationResponse, err error) { + err = autorest.Respond( + resp, + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByClosing()) + result.HttpResponse = resp + b, err := ioutil.ReadAll(resp.Body) + if err != nil { + return result, fmt.Errorf("reading response body for DatastoreSecrets: %+v", err) + } + model, err := unmarshalDatastoreSecretsImplementation(b) + if err != nil { + return + } + result.Model = &model + return +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_accountkeydatastorecredentials.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_accountkeydatastorecredentials.go new file mode 100644 index 000000000000..fc315ccb9ed5 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_accountkeydatastorecredentials.go @@ -0,0 +1,60 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ DatastoreCredentials = AccountKeyDatastoreCredentials{} + +type AccountKeyDatastoreCredentials struct { + Secrets DatastoreSecrets `json:"secrets"` + + // Fields inherited from DatastoreCredentials +} + +var _ json.Marshaler = AccountKeyDatastoreCredentials{} + +func (s AccountKeyDatastoreCredentials) MarshalJSON() ([]byte, error) { + type wrapper AccountKeyDatastoreCredentials + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AccountKeyDatastoreCredentials: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AccountKeyDatastoreCredentials: %+v", err) + } + decoded["credentialsType"] = "AccountKey" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AccountKeyDatastoreCredentials: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &AccountKeyDatastoreCredentials{} + +func (s *AccountKeyDatastoreCredentials) UnmarshalJSON(bytes []byte) error { + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling AccountKeyDatastoreCredentials into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["secrets"]; ok { + impl, err := unmarshalDatastoreSecretsImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Secrets' for 'AccountKeyDatastoreCredentials': %+v", err) + } + s.Secrets = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_accountkeydatastoresecrets.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_accountkeydatastoresecrets.go new file mode 100644 index 000000000000..c6cba7a7cff4 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_accountkeydatastoresecrets.go @@ -0,0 +1,41 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
See NOTICE.txt in the project root for license information. + +var _ DatastoreSecrets = AccountKeyDatastoreSecrets{} + +type AccountKeyDatastoreSecrets struct { + Key *string `json:"key,omitempty"` + + // Fields inherited from DatastoreSecrets +} + +var _ json.Marshaler = AccountKeyDatastoreSecrets{} + +func (s AccountKeyDatastoreSecrets) MarshalJSON() ([]byte, error) { + type wrapper AccountKeyDatastoreSecrets + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AccountKeyDatastoreSecrets: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AccountKeyDatastoreSecrets: %+v", err) + } + decoded["secretsType"] = "AccountKey" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AccountKeyDatastoreSecrets: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azureblobdatastore.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azureblobdatastore.go new file mode 100644 index 000000000000..dac9a23e6395 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azureblobdatastore.go @@ -0,0 +1,84 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ Datastore = AzureBlobDatastore{} + +type AzureBlobDatastore struct { + AccountName *string `json:"accountName,omitempty"` + ContainerName *string `json:"containerName,omitempty"` + Endpoint *string `json:"endpoint,omitempty"` + Protocol *string `json:"protocol,omitempty"` + ServiceDataAccessAuthIdentity *ServiceDataAccessAuthIdentity `json:"serviceDataAccessAuthIdentity,omitempty"` + + // Fields inherited from Datastore + Credentials DatastoreCredentials `json:"credentials"` + Description *string `json:"description,omitempty"` + IsDefault *bool `json:"isDefault,omitempty"` + Properties *map[string]string `json:"properties,omitempty"` + Tags *map[string]string `json:"tags,omitempty"` +} + +var _ json.Marshaler = AzureBlobDatastore{} + +func (s AzureBlobDatastore) MarshalJSON() ([]byte, error) { + type wrapper AzureBlobDatastore + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureBlobDatastore: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureBlobDatastore: %+v", err) + } + decoded["datastoreType"] = "AzureBlob" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureBlobDatastore: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &AzureBlobDatastore{} + +func (s *AzureBlobDatastore) UnmarshalJSON(bytes []byte) error { + type alias AzureBlobDatastore + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into AzureBlobDatastore: %+v", err) + } + + s.AccountName = decoded.AccountName + s.ContainerName = decoded.ContainerName + s.Description = decoded.Description + s.Endpoint = decoded.Endpoint + s.IsDefault = decoded.IsDefault + 
s.Properties = decoded.Properties + s.Protocol = decoded.Protocol + s.ServiceDataAccessAuthIdentity = decoded.ServiceDataAccessAuthIdentity + s.Tags = decoded.Tags + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling AzureBlobDatastore into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["credentials"]; ok { + impl, err := unmarshalDatastoreCredentialsImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Credentials' for 'AzureBlobDatastore': %+v", err) + } + s.Credentials = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azuredatalakegen1datastore.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azuredatalakegen1datastore.go new file mode 100644 index 000000000000..d7102beac8ce --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azuredatalakegen1datastore.go @@ -0,0 +1,78 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ Datastore = AzureDataLakeGen1Datastore{} + +type AzureDataLakeGen1Datastore struct { + ServiceDataAccessAuthIdentity *ServiceDataAccessAuthIdentity `json:"serviceDataAccessAuthIdentity,omitempty"` + StoreName string `json:"storeName"` + + // Fields inherited from Datastore + Credentials DatastoreCredentials `json:"credentials"` + Description *string `json:"description,omitempty"` + IsDefault *bool `json:"isDefault,omitempty"` + Properties *map[string]string `json:"properties,omitempty"` + Tags *map[string]string `json:"tags,omitempty"` +} + +var _ json.Marshaler = AzureDataLakeGen1Datastore{} + +func (s AzureDataLakeGen1Datastore) MarshalJSON() ([]byte, error) { + type wrapper AzureDataLakeGen1Datastore + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureDataLakeGen1Datastore: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureDataLakeGen1Datastore: %+v", err) + } + decoded["datastoreType"] = "AzureDataLakeGen1" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureDataLakeGen1Datastore: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &AzureDataLakeGen1Datastore{} + +func (s *AzureDataLakeGen1Datastore) UnmarshalJSON(bytes []byte) error { + type alias AzureDataLakeGen1Datastore + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into AzureDataLakeGen1Datastore: %+v", err) + } + + s.Description = decoded.Description + s.IsDefault = decoded.IsDefault + s.Properties = decoded.Properties + s.ServiceDataAccessAuthIdentity = decoded.ServiceDataAccessAuthIdentity + s.StoreName = decoded.StoreName + s.Tags = decoded.Tags + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling AzureDataLakeGen1Datastore into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["credentials"]; ok { + impl, err := unmarshalDatastoreCredentialsImplementation(v) + if err != nil { + 
return fmt.Errorf("unmarshaling field 'Credentials' for 'AzureDataLakeGen1Datastore': %+v", err) + } + s.Credentials = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azuredatalakegen2datastore.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azuredatalakegen2datastore.go new file mode 100644 index 000000000000..644d633d96b7 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azuredatalakegen2datastore.go @@ -0,0 +1,84 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ Datastore = AzureDataLakeGen2Datastore{} + +type AzureDataLakeGen2Datastore struct { + AccountName string `json:"accountName"` + Endpoint *string `json:"endpoint,omitempty"` + Filesystem string `json:"filesystem"` + Protocol *string `json:"protocol,omitempty"` + ServiceDataAccessAuthIdentity *ServiceDataAccessAuthIdentity `json:"serviceDataAccessAuthIdentity,omitempty"` + + // Fields inherited from Datastore + Credentials DatastoreCredentials `json:"credentials"` + Description *string `json:"description,omitempty"` + IsDefault *bool `json:"isDefault,omitempty"` + Properties *map[string]string `json:"properties,omitempty"` + Tags *map[string]string `json:"tags,omitempty"` +} + +var _ json.Marshaler = AzureDataLakeGen2Datastore{} + +func (s AzureDataLakeGen2Datastore) MarshalJSON() ([]byte, error) { + type wrapper AzureDataLakeGen2Datastore + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureDataLakeGen2Datastore: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureDataLakeGen2Datastore: %+v", err) + } + decoded["datastoreType"] = "AzureDataLakeGen2" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureDataLakeGen2Datastore: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &AzureDataLakeGen2Datastore{} + +func (s *AzureDataLakeGen2Datastore) UnmarshalJSON(bytes []byte) error { + type alias AzureDataLakeGen2Datastore + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into AzureDataLakeGen2Datastore: %+v", err) + } + + s.AccountName = decoded.AccountName + s.Description = decoded.Description + s.Endpoint = decoded.Endpoint + s.Filesystem = decoded.Filesystem + s.IsDefault = decoded.IsDefault + s.Properties = decoded.Properties + s.Protocol = decoded.Protocol + s.ServiceDataAccessAuthIdentity = decoded.ServiceDataAccessAuthIdentity + s.Tags = decoded.Tags + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling AzureDataLakeGen2Datastore into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["credentials"]; ok { + impl, err := unmarshalDatastoreCredentialsImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Credentials' for 'AzureDataLakeGen2Datastore': %+v", err) + } + s.Credentials = impl + } + return nil +} diff --git 
a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azurefiledatastore.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azurefiledatastore.go new file mode 100644 index 000000000000..efbfe501256f --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_azurefiledatastore.go @@ -0,0 +1,84 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ Datastore = AzureFileDatastore{} + +type AzureFileDatastore struct { + AccountName string `json:"accountName"` + Endpoint *string `json:"endpoint,omitempty"` + FileShareName string `json:"fileShareName"` + Protocol *string `json:"protocol,omitempty"` + ServiceDataAccessAuthIdentity *ServiceDataAccessAuthIdentity `json:"serviceDataAccessAuthIdentity,omitempty"` + + // Fields inherited from Datastore + Credentials DatastoreCredentials `json:"credentials"` + Description *string `json:"description,omitempty"` + IsDefault *bool `json:"isDefault,omitempty"` + Properties *map[string]string `json:"properties,omitempty"` + Tags *map[string]string `json:"tags,omitempty"` +} + +var _ json.Marshaler = AzureFileDatastore{} + +func (s AzureFileDatastore) MarshalJSON() ([]byte, error) { + type wrapper AzureFileDatastore + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling AzureFileDatastore: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling AzureFileDatastore: %+v", err) + } + decoded["datastoreType"] = "AzureFile" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling AzureFileDatastore: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &AzureFileDatastore{} + +func (s *AzureFileDatastore) UnmarshalJSON(bytes []byte) error { + type alias AzureFileDatastore + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into AzureFileDatastore: %+v", err) + } + + s.AccountName = decoded.AccountName + s.Description = decoded.Description + s.Endpoint = decoded.Endpoint + s.FileShareName = decoded.FileShareName + s.IsDefault = decoded.IsDefault + s.Properties = decoded.Properties + s.Protocol = decoded.Protocol + s.ServiceDataAccessAuthIdentity = decoded.ServiceDataAccessAuthIdentity + s.Tags = decoded.Tags + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling AzureFileDatastore into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["credentials"]; ok { + impl, err := unmarshalDatastoreCredentialsImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Credentials' for 'AzureFileDatastore': %+v", err) + } + s.Credentials = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_certificatedatastorecredentials.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_certificatedatastorecredentials.go new file mode 100644 index 000000000000..f51e1962dfb4 --- /dev/null +++ 
b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_certificatedatastorecredentials.go @@ -0,0 +1,76 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ DatastoreCredentials = CertificateDatastoreCredentials{} + +type CertificateDatastoreCredentials struct { + AuthorityUrl *string `json:"authorityUrl,omitempty"` + ClientId string `json:"clientId"` + ResourceUrl *string `json:"resourceUrl,omitempty"` + Secrets DatastoreSecrets `json:"secrets"` + TenantId string `json:"tenantId"` + Thumbprint string `json:"thumbprint"` + + // Fields inherited from DatastoreCredentials +} + +var _ json.Marshaler = CertificateDatastoreCredentials{} + +func (s CertificateDatastoreCredentials) MarshalJSON() ([]byte, error) { + type wrapper CertificateDatastoreCredentials + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling CertificateDatastoreCredentials: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling CertificateDatastoreCredentials: %+v", err) + } + decoded["credentialsType"] = "Certificate" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling CertificateDatastoreCredentials: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &CertificateDatastoreCredentials{} + +func (s *CertificateDatastoreCredentials) UnmarshalJSON(bytes []byte) error { + type alias CertificateDatastoreCredentials + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into CertificateDatastoreCredentials: %+v", err) + } + + s.AuthorityUrl = decoded.AuthorityUrl + s.ClientId = decoded.ClientId + s.ResourceUrl = decoded.ResourceUrl + s.TenantId = decoded.TenantId + s.Thumbprint = decoded.Thumbprint + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling CertificateDatastoreCredentials into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["secrets"]; ok { + impl, err := unmarshalDatastoreSecretsImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Secrets' for 'CertificateDatastoreCredentials': %+v", err) + } + s.Secrets = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_certificatedatastoresecrets.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_certificatedatastoresecrets.go new file mode 100644 index 000000000000..d56034ae15cb --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_certificatedatastoresecrets.go @@ -0,0 +1,41 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ DatastoreSecrets = CertificateDatastoreSecrets{} + +type CertificateDatastoreSecrets struct { + Certificate *string `json:"certificate,omitempty"` + + // Fields inherited from DatastoreSecrets +} + +var _ json.Marshaler = CertificateDatastoreSecrets{} + +func (s CertificateDatastoreSecrets) MarshalJSON() ([]byte, error) { + type wrapper CertificateDatastoreSecrets + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling CertificateDatastoreSecrets: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling CertificateDatastoreSecrets: %+v", err) + } + decoded["secretsType"] = "Certificate" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling CertificateDatastoreSecrets: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastore.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastore.go new file mode 100644 index 000000000000..1313aa7184c0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastore.go @@ -0,0 +1,72 @@ +package datastore + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type Datastore interface { +} + +func unmarshalDatastoreImplementation(input []byte) (Datastore, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling Datastore into map[string]interface: %+v", err) + } + + value, ok := temp["datastoreType"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "AzureBlob") { + var out AzureBlobDatastore + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureBlobDatastore: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "AzureDataLakeGen1") { + var out AzureDataLakeGen1Datastore + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureDataLakeGen1Datastore: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "AzureDataLakeGen2") { + var out AzureDataLakeGen2Datastore + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureDataLakeGen2Datastore: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "AzureFile") { + var out AzureFileDatastore + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AzureFileDatastore: %+v", err) + } + return out, nil + } + + type RawDatastoreImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawDatastoreImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastorecredentials.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastorecredentials.go new file mode 100644 index 000000000000..e42245d15035 --- 
/dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastorecredentials.go @@ -0,0 +1,80 @@ +package datastore + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +type DatastoreCredentials interface { +} + +func unmarshalDatastoreCredentialsImplementation(input []byte) (DatastoreCredentials, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling DatastoreCredentials into map[string]interface: %+v", err) + } + + value, ok := temp["credentialsType"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "AccountKey") { + var out AccountKeyDatastoreCredentials + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AccountKeyDatastoreCredentials: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Certificate") { + var out CertificateDatastoreCredentials + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into CertificateDatastoreCredentials: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "None") { + var out NoneDatastoreCredentials + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into NoneDatastoreCredentials: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Sas") { + var out SasDatastoreCredentials + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into SasDatastoreCredentials: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "ServicePrincipal") { + var out ServicePrincipalDatastoreCredentials + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ServicePrincipalDatastoreCredentials: %+v", err) + } + return out, nil + } + + type RawDatastoreCredentialsImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawDatastoreCredentialsImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastoreresource.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastoreresource.go new file mode 100644 index 000000000000..31ea1d51fe74 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastoreresource.go @@ -0,0 +1,48 @@ +package datastore + +import ( + "encoding/json" + "fmt" + + "github.com/hashicorp/go-azure-helpers/resourcemanager/systemdata" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type DatastoreResource struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Properties Datastore `json:"properties"` + SystemData *systemdata.SystemData `json:"systemData,omitempty"` + Type *string `json:"type,omitempty"` +} + +var _ json.Unmarshaler = &DatastoreResource{} + +func (s *DatastoreResource) UnmarshalJSON(bytes []byte) error { + type alias DatastoreResource + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into DatastoreResource: %+v", err) + } + + s.Id = decoded.Id + s.Name = decoded.Name + s.SystemData = decoded.SystemData + s.Type = decoded.Type + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling DatastoreResource into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["properties"]; ok { + impl, err := unmarshalDatastoreImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Properties' for 'DatastoreResource': %+v", err) + } + s.Properties = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastoresecrets.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastoresecrets.go new file mode 100644 index 000000000000..4abc05c2fc95 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_datastoresecrets.go @@ -0,0 +1,72 @@ +package datastore + +import ( + "encoding/json" + "fmt" + "strings" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +type DatastoreSecrets interface { +} + +func unmarshalDatastoreSecretsImplementation(input []byte) (DatastoreSecrets, error) { + if input == nil { + return nil, nil + } + + var temp map[string]interface{} + if err := json.Unmarshal(input, &temp); err != nil { + return nil, fmt.Errorf("unmarshaling DatastoreSecrets into map[string]interface: %+v", err) + } + + value, ok := temp["secretsType"].(string) + if !ok { + return nil, nil + } + + if strings.EqualFold(value, "AccountKey") { + var out AccountKeyDatastoreSecrets + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into AccountKeyDatastoreSecrets: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Certificate") { + var out CertificateDatastoreSecrets + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into CertificateDatastoreSecrets: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "Sas") { + var out SasDatastoreSecrets + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into SasDatastoreSecrets: %+v", err) + } + return out, nil + } + + if strings.EqualFold(value, "ServicePrincipal") { + var out ServicePrincipalDatastoreSecrets + if err := json.Unmarshal(input, &out); err != nil { + return nil, fmt.Errorf("unmarshaling into ServicePrincipalDatastoreSecrets: %+v", err) + } + return out, nil + } + + type RawDatastoreSecretsImpl struct { + Type string `json:"-"` + Values map[string]interface{} `json:"-"` + } + out := RawDatastoreSecretsImpl{ + Type: value, + Values: temp, + } + return out, nil + +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_nonedatastorecredentials.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_nonedatastorecredentials.go new file mode 100644 index 000000000000..26499e05a90d --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_nonedatastorecredentials.go @@ -0,0 +1,40 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ DatastoreCredentials = NoneDatastoreCredentials{} + +type NoneDatastoreCredentials struct { + + // Fields inherited from DatastoreCredentials +} + +var _ json.Marshaler = NoneDatastoreCredentials{} + +func (s NoneDatastoreCredentials) MarshalJSON() ([]byte, error) { + type wrapper NoneDatastoreCredentials + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling NoneDatastoreCredentials: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling NoneDatastoreCredentials: %+v", err) + } + decoded["credentialsType"] = "None" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling NoneDatastoreCredentials: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_sasdatastorecredentials.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_sasdatastorecredentials.go new file mode 100644 index 000000000000..73efe6458b40 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_sasdatastorecredentials.go @@ -0,0 +1,60 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ DatastoreCredentials = SasDatastoreCredentials{} + +type SasDatastoreCredentials struct { + Secrets DatastoreSecrets `json:"secrets"` + + // Fields inherited from DatastoreCredentials +} + +var _ json.Marshaler = SasDatastoreCredentials{} + +func (s SasDatastoreCredentials) MarshalJSON() ([]byte, error) { + type wrapper SasDatastoreCredentials + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling SasDatastoreCredentials: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling SasDatastoreCredentials: %+v", err) + } + decoded["credentialsType"] = "Sas" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling SasDatastoreCredentials: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &SasDatastoreCredentials{} + +func (s *SasDatastoreCredentials) UnmarshalJSON(bytes []byte) error { + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling SasDatastoreCredentials into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["secrets"]; ok { + impl, err := unmarshalDatastoreSecretsImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Secrets' for 'SasDatastoreCredentials': %+v", err) + } + s.Secrets = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_sasdatastoresecrets.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_sasdatastoresecrets.go new file mode 100644 index 000000000000..f99c98f83712 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_sasdatastoresecrets.go @@ 
-0,0 +1,41 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +var _ DatastoreSecrets = SasDatastoreSecrets{} + +type SasDatastoreSecrets struct { + SasToken *string `json:"sasToken,omitempty"` + + // Fields inherited from DatastoreSecrets +} + +var _ json.Marshaler = SasDatastoreSecrets{} + +func (s SasDatastoreSecrets) MarshalJSON() ([]byte, error) { + type wrapper SasDatastoreSecrets + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling SasDatastoreSecrets: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling SasDatastoreSecrets: %+v", err) + } + decoded["secretsType"] = "Sas" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling SasDatastoreSecrets: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_serviceprincipaldatastorecredentials.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_serviceprincipaldatastorecredentials.go new file mode 100644 index 000000000000..174520697ae5 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_serviceprincipaldatastorecredentials.go @@ -0,0 +1,74 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ DatastoreCredentials = ServicePrincipalDatastoreCredentials{} + +type ServicePrincipalDatastoreCredentials struct { + AuthorityUrl *string `json:"authorityUrl,omitempty"` + ClientId string `json:"clientId"` + ResourceUrl *string `json:"resourceUrl,omitempty"` + Secrets DatastoreSecrets `json:"secrets"` + TenantId string `json:"tenantId"` + + // Fields inherited from DatastoreCredentials +} + +var _ json.Marshaler = ServicePrincipalDatastoreCredentials{} + +func (s ServicePrincipalDatastoreCredentials) MarshalJSON() ([]byte, error) { + type wrapper ServicePrincipalDatastoreCredentials + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ServicePrincipalDatastoreCredentials: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ServicePrincipalDatastoreCredentials: %+v", err) + } + decoded["credentialsType"] = "ServicePrincipal" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ServicePrincipalDatastoreCredentials: %+v", err) + } + + return encoded, nil +} + +var _ json.Unmarshaler = &ServicePrincipalDatastoreCredentials{} + +func (s *ServicePrincipalDatastoreCredentials) UnmarshalJSON(bytes []byte) error { + type alias ServicePrincipalDatastoreCredentials + var decoded alias + if err := json.Unmarshal(bytes, &decoded); err != nil { + return fmt.Errorf("unmarshaling into ServicePrincipalDatastoreCredentials: %+v", err) + } + + s.AuthorityUrl = decoded.AuthorityUrl + s.ClientId = decoded.ClientId + s.ResourceUrl = decoded.ResourceUrl + s.TenantId = decoded.TenantId + + var temp map[string]json.RawMessage + if err := json.Unmarshal(bytes, &temp); err != nil { + return fmt.Errorf("unmarshaling ServicePrincipalDatastoreCredentials into map[string]json.RawMessage: %+v", err) + } + + if v, ok := temp["secrets"]; ok { + impl, err := unmarshalDatastoreSecretsImplementation(v) + if err != nil { + return fmt.Errorf("unmarshaling field 'Secrets' for 'ServicePrincipalDatastoreCredentials': %+v", err) + } + s.Secrets = impl + } + return nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_serviceprincipaldatastoresecrets.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_serviceprincipaldatastoresecrets.go new file mode 100644 index 000000000000..d2466b0f1dae --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/model_serviceprincipaldatastoresecrets.go @@ -0,0 +1,41 @@ +package datastore + +import ( + "encoding/json" + "fmt" +) + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. 
+ +var _ DatastoreSecrets = ServicePrincipalDatastoreSecrets{} + +type ServicePrincipalDatastoreSecrets struct { + ClientSecret *string `json:"clientSecret,omitempty"` + + // Fields inherited from DatastoreSecrets +} + +var _ json.Marshaler = ServicePrincipalDatastoreSecrets{} + +func (s ServicePrincipalDatastoreSecrets) MarshalJSON() ([]byte, error) { + type wrapper ServicePrincipalDatastoreSecrets + wrapped := wrapper(s) + encoded, err := json.Marshal(wrapped) + if err != nil { + return nil, fmt.Errorf("marshaling ServicePrincipalDatastoreSecrets: %+v", err) + } + + var decoded map[string]interface{} + if err := json.Unmarshal(encoded, &decoded); err != nil { + return nil, fmt.Errorf("unmarshaling ServicePrincipalDatastoreSecrets: %+v", err) + } + decoded["secretsType"] = "ServicePrincipal" + + encoded, err = json.Marshal(decoded) + if err != nil { + return nil, fmt.Errorf("re-marshaling ServicePrincipalDatastoreSecrets: %+v", err) + } + + return encoded, nil +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/predicates.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/predicates.go new file mode 100644 index 000000000000..05b6e2f85ad3 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/predicates.go @@ -0,0 +1,24 @@ +package datastore + +type DatastoreResourceOperationPredicate struct { + Id *string + Name *string + Type *string +} + +func (p DatastoreResourceOperationPredicate) Matches(input DatastoreResource) bool { + + if p.Id != nil && (input.Id == nil || *p.Id != *input.Id) { + return false + } + + if p.Name != nil && (input.Name == nil || *p.Name != *input.Name) { + return false + } + + if p.Type != nil && (input.Type == nil || *p.Type != *input.Type) { + return false + } + + return true +} diff --git a/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/version.go b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/version.go new file mode 100644 index 000000000000..8202e881c899 --- /dev/null +++ b/vendor/github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore/version.go @@ -0,0 +1,12 @@ +package datastore + +import "fmt" + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See NOTICE.txt in the project root for license information. + +const defaultApiVersion = "2022-05-01" + +func userAgent() string { + return fmt.Sprintf("hashicorp/go-azure-sdk/datastore/%s", defaultApiVersion) +} From 8baf93f39783e8616a60d525db193b2bf535b522 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Mon, 9 Jan 2023 14:39:30 +0800 Subject: [PATCH 03/22] update doc --- .../r/machine_learning_datastore_blogstorage.html.markdown | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown index 34699942542c..24fe7fb09c12 100644 --- a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown +++ b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown @@ -84,9 +84,9 @@ The following arguments are supported: * `workspace_id` - (Required) The ID of the machine learning workspace. Changing this forces a new Machine Learning DataStore to be created.
--- -* `storage_account_name` - (Optional) The name of the storage account. Changing this forces a new Machine Learning DataStore to be created. +* `storage_account_name` - (Required) The name of the storage account. Changing this forces a new Machine Learning DataStore to be created. -* `container_name` - (Optional) The name of the storage account container. Changing this forces a new Machine Learning DataStore to be created. +* `container_name` - (Required) The name of the storage account container. Changing this forces a new Machine Learning DataStore to be created. * `account_key` - (Optional) The access key of the storage account. Conflicts with `shared_access_signature`. From 29e4044fce0a2c56e8aeeffd4b514a43be7c08fa Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Mon, 9 Jan 2023 14:51:52 +0800 Subject: [PATCH 04/22] optimize expandBlobStorage --- .../machine_learning_datastore_blobstorage_resource.go | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index 4223cfdfde12..dcc1dd372cb6 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -143,10 +143,7 @@ func resourceMachineLearningDataStoreCreateOrUpdate(d *pluginsdk.ResourceData, m Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), } - prop, err := expandBlobStorage(d) - if err != nil { - return fmt.Errorf("%+v", err) - } + prop := expandBlobStorage(d) datastoreRaw.Properties = prop _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) @@ -201,7 +198,7 @@ func resourceMachineLearningDataStoreDelete(d *pluginsdk.ResourceData, meta inte return nil } -func expandBlobStorage(d *pluginsdk.ResourceData) (*datastore.AzureBlobDatastore, error) { +func expandBlobStorage(d *pluginsdk.ResourceData) *datastore.AzureBlobDatastore { storeProps := &datastore.AzureBlobDatastore{ AccountName: utils.String(d.Get("storage_account_name").(string)), ContainerName: utils.String(d.Get("container_name").(string)), @@ -233,7 +230,7 @@ func expandBlobStorage(d *pluginsdk.ResourceData) (*datastore.AzureBlobDatastore } } - return storeProps, nil + return storeProps } func flattenBlobStorage(d *pluginsdk.ResourceData, data datastore.AzureBlobDatastore) error { From 6cd24d67e1c066726e8a0f2ce3d075424f364f63 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Mon, 9 Jan 2023 15:32:27 +0800 Subject: [PATCH 05/22] update test --- ...arning_datastore_blobstorage_resource_test.go | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go index 89ee271960f9..045d0631ef8b 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go @@ -14,11 +14,11 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/utils" ) -type MachineLearningDataStore struct{} +type MachineLearningDataStoreBlobStorage struct{} func TestAccMachineLearningDataStoreBlobStorage_accountKey(t *testing.T) { data := acceptance.BuildTestData(t, 
"azurerm_machine_learning_datastore_blobstorage", "test") - r := MachineLearningDataStore{} + r := MachineLearningDataStoreBlobStorage{} data.ResourceTest(t, r, []acceptance.TestStep{ { @@ -33,7 +33,7 @@ func TestAccMachineLearningDataStoreBlobStorage_accountKey(t *testing.T) { func TestAccMachineLearningDataStoreBlobStorage_sasToken(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_blobstorage", "test") - r := MachineLearningDataStore{} + r := MachineLearningDataStoreBlobStorage{} data.ResourceTest(t, r, []acceptance.TestStep{ { @@ -48,7 +48,7 @@ func TestAccMachineLearningDataStoreBlobStorage_sasToken(t *testing.T) { func TestAccMachineLearningDataStoreBlobStorage_Update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_blobstorage", "test") - r := MachineLearningDataStore{} + r := MachineLearningDataStoreBlobStorage{} data.ResourceTest(t, r, []acceptance.TestStep{ { @@ -68,7 +68,7 @@ func TestAccMachineLearningDataStoreBlobStorage_Update(t *testing.T) { }) } -func (r MachineLearningDataStore) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { +func (r MachineLearningDataStoreBlobStorage) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { dataStoreClient := client.MachineLearning.DatastoreClient id, err := datastore.ParseDataStoreID(state.ID) if err != nil { @@ -86,7 +86,7 @@ func (r MachineLearningDataStore) Exists(ctx context.Context, client *clients.Cl return utils.Bool(resp.Model.Properties != nil), nil } -func (r MachineLearningDataStore) blobStorageAccountKey(data acceptance.TestData) string { +func (r MachineLearningDataStoreBlobStorage) blobStorageAccountKey(data acceptance.TestData) string { template := r.template(data) return fmt.Sprintf(` %s @@ -107,7 +107,7 @@ resource "azurerm_machine_learning_datastore_blobstorage" "test" { `, template, data.RandomInteger) } -func (r MachineLearningDataStore) blobStorageSas(data acceptance.TestData) string { +func (r MachineLearningDataStoreBlobStorage) blobStorageSas(data acceptance.TestData) string { template := r.template(data) return fmt.Sprintf(` %s @@ -165,7 +165,7 @@ resource "azurerm_machine_learning_datastore_blobstorage" "test" { `, template, data.RandomInteger) } -func (r MachineLearningDataStore) template(data acceptance.TestData) string { +func (r MachineLearningDataStoreBlobStorage) template(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { features { From 1d5f2844d2be65b632b107d97173e25b248dc7d3 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Mon, 9 Jan 2023 15:57:51 +0800 Subject: [PATCH 06/22] update error message --- .../machine_learning_datastore_blobstorage_resource.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index dcc1dd372cb6..981ab7367202 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -134,7 +134,7 @@ func resourceMachineLearningDataStoreCreateOrUpdate(d *pluginsdk.ResourceData, m } } if !response.WasNotFound(existing.HttpResponse) { - return tf.ImportAsExistsError("azurerm_machine_learning_datastore", id.ID()) + return 
tf.ImportAsExistsError("azurerm_machine_learning_datastore_blobstorage", id.ID()) } } From 065283ed648e80beae2bea377df6d81da724d883 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Mon, 9 Jan 2023 16:42:51 +0800 Subject: [PATCH 07/22] update test case --- ...ing_datastore_blobstorage_resource_test.go | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go index 045d0631ef8b..20088759011d 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go @@ -68,6 +68,21 @@ func TestAccMachineLearningDataStoreBlobStorage_Update(t *testing.T) { }) } +func TestAccMachineLearningDataStoreBlobStorage_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_blobstorage", "test") + r := MachineLearningDataStoreBlobStorage{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blobStorageAccountKey(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + func (r MachineLearningDataStoreBlobStorage) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { dataStoreClient := client.MachineLearning.DatastoreClient id, err := datastore.ParseDataStoreID(state.ID) @@ -165,6 +180,23 @@ resource "azurerm_machine_learning_datastore_blobstorage" "test" { `, template, data.RandomInteger) } +func (r MachineLearningDataStoreBlobStorage) requiresImport(data acceptance.TestData) string { + template := r.blobStorageAccountKey(data) + return fmt.Sprintf(` + +%s + +resource "azurerm_machine_learning_datastore_blobstorage" "import" { + name = azurerm_machine_learning_datastore_blobstorage.test.name + workspace_id = azurerm_machine_learning_datastore_blobstorage.test.workspace_id + storage_account_name = azurerm_machine_learning_datastore_blobstorage.test.storage_account_name + container_name = azurerm_machine_learning_datastore_blobstorage.test.container_name + account_key = azurerm_machine_learning_datastore_blobstorage.test.account_key +} + +`, template) +} + func (r MachineLearningDataStoreBlobStorage) template(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { From 98422724c160d464bb5347e71aecfd366fd1b0b4 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Tue, 10 Jan 2023 11:28:32 +0800 Subject: [PATCH 08/22] machine learning data store file share --- ...e_learning_datastore_fileshare_resource.go | 242 ++++++++++++++++ ...rning_datastore_fileshare_resource_test.go | 272 ++++++++++++++++++ .../services/machinelearning/registration.go | 1 + ...learning_datastore_fileshare.html.markdown | 126 ++++++++ 4 files changed, 641 insertions(+) create mode 100644 internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go create mode 100644 internal/services/machinelearning/machine_learning_datastore_fileshare_resource_test.go create mode 100644 website/docs/r/machine_learning_datastore_fileshare.html.markdown diff --git a/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go new file mode 100644 index 000000000000..5aa582a03aa4 --- 
/dev/null +++ b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go @@ -0,0 +1,242 @@ +package machinelearning + +import ( + "fmt" + "time" + + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/workspaces" + "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" + "github.com/hashicorp/terraform-provider-azurerm/internal/clients" + "github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/suppress" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" + "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" + "github.com/hashicorp/terraform-provider-azurerm/utils" +) + +func resourceMachineLearningFileShare() *pluginsdk.Resource { + resource := &pluginsdk.Resource{ + Create: resourceMachineLearningFileShareCreateOrUpdate, + Read: resourceMachineLearningFileShareRead, + Update: resourceMachineLearningFileShareCreateOrUpdate, + Delete: resourceMachineLearningFileShareDelete, + + Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { + _, err := datastore.ParseDataStoreID(id) + return err + }), + + Timeouts: &pluginsdk.ResourceTimeout{ + Create: pluginsdk.DefaultTimeout(30 * time.Minute), + Read: pluginsdk.DefaultTimeout(5 * time.Minute), + Update: pluginsdk.DefaultTimeout(30 * time.Minute), + Delete: pluginsdk.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataStoreName, + }, + + "workspace_id": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.WorkspaceID, + }, + + "storage_account_name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: suppress.CaseDifference, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "file_share_name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "description": { + Type: pluginsdk.TypeString, + Optional: true, + ForceNew: true, + }, + + "is_default": { + Type: pluginsdk.TypeBool, + Optional: true, + Default: false, + }, + + "service_data_auth_identity": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datastore.ServiceDataAccessAuthIdentityNone), + string(datastore.ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity), + string(datastore.ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity), + }, + false), + Default: string(datastore.ServiceDataAccessAuthIdentityNone), + }, + + "account_key": { + Type: pluginsdk.TypeString, + Optional: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + ExactlyOneOf: []string{"account_key", "shared_access_signature"}, + }, + + "shared_access_signature": { + Type: pluginsdk.TypeString, + Optional: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + AtLeastOneOf: []string{"account_key", "shared_access_signature"}, + }, + + "tags": commonschema.TagsForceNew(), + }, + } + return resource +} + +func 
resourceMachineLearningFileShareCreateOrUpdate(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).MachineLearning.DatastoreClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string)) + if err != nil { + return err + } + + id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) + if d.IsNewResource() { + existing, err := client.Get(ctx, id) + if err != nil { + if !response.WasNotFound(existing.HttpResponse) { + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) + } + } + if !response.WasNotFound(existing.HttpResponse) { + return tf.ImportAsExistsError("azurerm_machine_learning_datastore_fileshare", id.ID()) + } + } + + datastoreRaw := datastore.DatastoreResource{ + Name: utils.String(d.Get("name").(string)), + Type: utils.ToPtr(string(datastore.DatastoreTypeAzureFile)), + } + + prop := expandFileShare(d) + datastoreRaw.Properties = prop + + _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) + if err != nil { + return fmt.Errorf("creating/updating %s: %+v", id, err) + } + + d.SetId(id.ID()) + return resourceMachineLearningFileShareRead(d, meta) +} + +func resourceMachineLearningFileShareRead(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).MachineLearning.DatastoreClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := datastore.ParseDataStoreID(d.Id()) + if err != nil { + return fmt.Errorf("parsing Machine Learning Data Store ID `%q`: %+v", d.Id(), err) + } + + resp, err := client.Get(ctx, *id) + if err != nil { + if response.WasNotFound(resp.HttpResponse) { + d.SetId("") + return nil + } + return fmt.Errorf("making Read request on Machine Learning Data Store File Share %q (Resource Group %q): %+v", id.Name, id.ResourceGroupName, err) + } + + workspaceId := workspaces.NewWorkspaceID(subscriptionId, id.ResourceGroupName, id.WorkspaceName) + d.Set("name", resp.Model.Name) + d.Set("workspace_id", workspaceId.ID()) + return flattenFileShare(d, resp.Model.Properties.(datastore.AzureFileDatastore)) +} + +func resourceMachineLearningFileShareDelete(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).MachineLearning.DatastoreClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := datastore.ParseDataStoreID(d.Id()) + if err != nil { + return fmt.Errorf("parsing Machine Learning Workspace Date Store ID `%q`: %+v", d.Id(), err) + } + + if _, err := client.Delete(ctx, *id); err != nil { + return fmt.Errorf("deleting Machine Learning Workspace Date Strore File Share %q (Resource Group %q): %+v", id.Name, id.ResourceGroupName, err) + } + + return nil +} + +func expandFileShare(d *pluginsdk.ResourceData) *datastore.AzureFileDatastore { + storeProps := &datastore.AzureFileDatastore{ + AccountName: d.Get("storage_account_name").(string), + FileShareName: d.Get("file_share_name").(string), + Description: utils.String(d.Get("description").(string)), + ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), + IsDefault: 
utils.Bool(d.Get("is_default").(bool)), + Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), + } + + accountKey := d.Get("account_key").(string) + if accountKey != "" { + storeProps.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeAccountKey), + "secrets": map[string]interface{}{ + "secretsType": "AccountKey", + "key": accountKey, + }, + } + } + + sasToken := d.Get("shared_access_signature").(string) + if sasToken != "" { + storeProps.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeSas), + "secrets": map[string]interface{}{ + "secretsType": "Sas", + "sasToken": sasToken, + }, + } + } + + return storeProps +} + +func flattenFileShare(d *pluginsdk.ResourceData, data datastore.AzureFileDatastore) error { + d.Set("description", data.Description) + d.Set("is_default", data.IsDefault) + d.Set("service_data_auth_identity", string(*data.ServiceDataAccessAuthIdentity)) + d.Set("storage_account_name", data.AccountName) + d.Set("file_share_name", data.FileShareName) + return flattenAndSetTags(d, *data.Tags) +} diff --git a/internal/services/machinelearning/machine_learning_datastore_fileshare_resource_test.go b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource_test.go new file mode 100644 index 000000000000..71bf6999580b --- /dev/null +++ b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource_test.go @@ -0,0 +1,272 @@ +package machinelearning_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/go-azure-helpers/lang/response" + "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance" + "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check" + "github.com/hashicorp/terraform-provider-azurerm/internal/clients" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/utils" +) + +type MachineLearningDataStoreFileShare struct{} + +func TestAccMachineLearningDataStoreFileShare_accountKey(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_fileshare", "test") + r := MachineLearningDataStoreFileShare{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.fileShareAccountKey(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("account_key"), + }) +} + +func TestAccMachineLearningDataStoreFileShare_sasToken(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_fileshare", "test") + r := MachineLearningDataStoreFileShare{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.fileShareSas(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("shared_access_signature"), + }) +} + +func TestAccMachineLearningDataStoreFileShare_Update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_fileshare", "test") + r := MachineLearningDataStoreFileShare{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.fileShareAccountKey(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("account_key"), + { + Config: r.fileShareSas(data), + Check: acceptance.ComposeTestCheckFunc( + 
check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep("account_key", "shared_access_signature"), + }) +} + +func TestAccMachineLearningDataStoreFileShare_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_machine_learning_datastore_fileshare", "test") + r := MachineLearningDataStoreFileShare{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.fileShareAccountKey(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.RequiresImportErrorStep(r.requiresImport), + }) +} + +func (r MachineLearningDataStoreFileShare) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { + dataStoreClient := client.MachineLearning.DatastoreClient + id, err := datastore.ParseDataStoreID(state.ID) + if err != nil { + return nil, err + } + + resp, err := dataStoreClient.Get(ctx, *id) + if err != nil { + if response.WasNotFound(resp.HttpResponse) { + return utils.Bool(false), nil + } + return nil, fmt.Errorf("retrieving Machine Learning Data Store File Share %q: %+v", state.ID, err) + } + + return utils.Bool(resp.Model.Properties != nil), nil +} + +func (r MachineLearningDataStoreFileShare) fileShareAccountKey(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_share" "test" { + name = "accfs%[2]d" + storage_account_name = azurerm_storage_account.test.name + quota = 1 +} + +resource "azurerm_machine_learning_datastore_fileshare" "test" { + name = "accdatastore%[2]d" + workspace_id = azurerm_machine_learning_workspace.test.id + storage_account_name = azurerm_storage_account.test.name + file_share_name = azurerm_storage_share.test.name + account_key = azurerm_storage_account.test.primary_access_key +} +`, template, data.RandomInteger) +} + +func (r MachineLearningDataStoreFileShare) fileShareSas(data acceptance.TestData) string { + template := r.template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_storage_share" "test" { + name = "accfs%[2]d" + storage_account_name = azurerm_storage_account.test.name + quota = 1 +} + +data "azurerm_storage_account_sas" "test" { + connection_string = azurerm_storage_account.test.primary_connection_string + https_only = true + signed_version = "2019-10-10" + + resource_types { + service = true + container = true + object = true + } + + services { + blob = true + queue = false + table = false + file = true + } + + start = "2022-01-01T06:17:07Z" + expiry = "2024-12-23T06:17:07Z" + + permissions { + read = true + write = true + delete = false + list = false + add = true + create = true + update = false + process = false + tag = false + filter = false + } +} + +resource "azurerm_machine_learning_datastore_fileshare" "test" { + name = "accdatastore%[2]d" + workspace_id = azurerm_machine_learning_workspace.test.id + storage_account_name = azurerm_storage_account.test.name + file_share_name = azurerm_storage_share.test.name + shared_access_signature = data.azurerm_storage_account_sas.test.sas +} + +`, template, data.RandomInteger) +} + +func (r MachineLearningDataStoreFileShare) requiresImport(data acceptance.TestData) string { + template := r.fileShareAccountKey(data) + return fmt.Sprintf(` +%s + +resource "azurerm_machine_learning_datastore_fileshare" "import" { + name = azurerm_machine_learning_datastore_fileshare.test.name + workspace_id = azurerm_machine_learning_datastore_fileshare.test.workspace_id + storage_account_name = 
azurerm_machine_learning_datastore_fileshare.test.storage_account_name + file_share_name = azurerm_machine_learning_datastore_fileshare.test.file_share_name + account_key = azurerm_machine_learning_datastore_fileshare.test.account_key +} + +`, template) +} + +func (r MachineLearningDataStoreFileShare) template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features { + key_vault { + purge_soft_delete_on_destroy = false + purge_soft_deleted_keys_on_destroy = false + } + resource_group { + prevent_deletion_if_contains_resources = false + } + + } +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-ml-%[1]d" + location = "%[2]s" +} + +resource "azurerm_application_insights" "test" { + name = "acctestai-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_type = "web" +} + +resource "azurerm_key_vault" "test" { + name = "acctestvault%[3]s" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + tenant_id = data.azurerm_client_config.current.tenant_id + + sku_name = "standard" + + purge_protection_enabled = true +} + +resource "azurerm_key_vault_access_policy" "test" { + key_vault_id = azurerm_key_vault.test.id + tenant_id = data.azurerm_client_config.current.tenant_id + object_id = data.azurerm_client_config.current.object_id + + key_permissions = [ + "Create", + "Get", + "Delete", + "Purge", + ] +} + +resource "azurerm_storage_account" "test" { + name = "acctestsa%[4]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + account_tier = "Standard" + account_replication_type = "LRS" +} + +resource "azurerm_machine_learning_workspace" "test" { + name = "acctest-MLW-%[1]d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + application_insights_id = azurerm_application_insights.test.id + key_vault_id = azurerm_key_vault.test.id + storage_account_id = azurerm_storage_account.test.id + + identity { + type = "SystemAssigned" + } +} +`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomIntOfLength(15)) +} diff --git a/internal/services/machinelearning/registration.go b/internal/services/machinelearning/registration.go index 91e02c46e84d..7f632e3a6998 100644 --- a/internal/services/machinelearning/registration.go +++ b/internal/services/machinelearning/registration.go @@ -40,5 +40,6 @@ func (r Registration) SupportedResources() map[string]*pluginsdk.Resource { "azurerm_machine_learning_synapse_spark": resourceSynapseSpark(), "azurerm_machine_learning_workspace": resourceMachineLearningWorkspace(), "azurerm_machine_learning_datastore_blobstorage": resourceMachineLearningDataStore(), + "azurerm_machine_learning_datastore_fileshare": resourceMachineLearningFileShare(), } } diff --git a/website/docs/r/machine_learning_datastore_fileshare.html.markdown b/website/docs/r/machine_learning_datastore_fileshare.html.markdown new file mode 100644 index 000000000000..a848eae1c337 --- /dev/null +++ b/website/docs/r/machine_learning_datastore_fileshare.html.markdown @@ -0,0 +1,126 @@ +--- +subcategory: "Machine Learning" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_machine_learning_datastore_fileshare" +description: |- + Manages a Machine Learning File Sharee DataStore. 
+--- + +# azurerm_machine_learning_datastore_fileshare + +Manages a Machine Learning File Share DataStore. + +## Example Usage with Azure File Share + +```hcl +provider "azurerm" { + features {} +} + +data "azurerm_client_config" "current" {} + +resource "azurerm_resource_group" "example" { + name = "example-resources" + location = "West Europe" +} + +resource "azurerm_application_insights" "example" { + name = "workspace-example-ai" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + application_type = "web" +} + +resource "azurerm_key_vault" "example" { + name = "workspaceexamplekeyvault" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + tenant_id = data.azurerm_client_config.current.tenant_id + sku_name = "premium" +} + +resource "azurerm_storage_account" "example" { + name = "workspacestorageaccount" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + account_tier = "Standard" + account_replication_type = "GRS" +} + +resource "azurerm_machine_learning_workspace" "example" { + name = "example-workspace" + location = azurerm_resource_group.example.location + resource_group_name = azurerm_resource_group.example.name + application_insights_id = azurerm_application_insights.example.id + key_vault_id = azurerm_key_vault.example.id + storage_account_id = azurerm_storage_account.example.id + + identity { + type = "SystemAssigned" + } +} + +resource "azurerm_storage_share" "example" { + name = "example" + storage_account_name = azurerm_storage_account.example.name + quota = 1 +} + +resource "azurerm_machine_learning_datastore_fileshare" "example" { + name = "example-datastore" + workspace_id = azurerm_machine_learning_workspace.example.id + storage_account_name = azurerm_storage_account.example.name + file_share_name = azurerm_storage_share.example.name + account_key = azurerm_storage_account.example.primary_access_key +} +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) The name of the Machine Learning DataStore. Changing this forces a new Machine Learning DataStore to be created. + +* `workspace_id` - (Required) The ID of the machine learning workspace. Changing this forces a new Machine Learning DataStore to be created. + +--- +* `storage_account_name` - (Required) The name of the storage account. Changing this forces a new Machine Learning DataStore to be created. + +* `file_share_name` - (Required) The name of the storage account file share. Changing this forces a new Machine Learning DataStore to be created. + +* `account_key` - (Optional) The access key of the storage account. Conflicts with `shared_access_signature`. + +* `shared_access_signature` - (Optional) The shared access signature of the storage account. Conflicts with `account_key`. + +* `description` - (Optional) Text used to describe the asset. Changing this forces a new Machine Learning DataStore to be created. + +* `is_default` - (Optional) A bool indicate if datastore is the workspace default datastore. Defaults to `false`. + +~> **Note:** `is_default` can only be set to `true` on update. + +* `service_data_auth_identity` - (Optional) Indicates which identity to use to authenticate service data access to customer's storage. Defaults to `None`. Possible values are `None`, `WorkspaceSystemAssignedIdentity` and `WorkspaceUserAssignedIdentity`. 
+ +* `tags` - (Optional) A mapping of tags which should be assigned to the Machine Learning DataStore. Changing this forces a new Machine Learning DataStore to be created. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Machine Learning DataStore. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/language/resources/syntax#operation-timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Machine Learning DataStore. +* `read` - (Defaults to 5 minutes) Used when retrieving the Machine Learning DataStore. +* `update` - (Defaults to 30 minutes) Used when updating the Machine Learning DataStore. +* `delete` - (Defaults to 30 minutes) Used when deleting the Machine Learning DataStore. + +## Import + +Machine Learning DataStores can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_machine_learning_datastore_fileshare.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.MachineLearningServices/workspaces/mlw1/datastores/datastore1 +``` From ecd57bc0b72cdf7cff68400abe364ccc97e98c20 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Wed, 11 Jan 2023 13:45:27 +0800 Subject: [PATCH 09/22] machine learning data store blog storage --- ...learning_datastore_blobstorage_resource.go | 96 +++++++++---------- .../services/machinelearning/registration.go | 2 +- ...arning_datastore_blogstorage.html.markdown | 12 +-- 3 files changed, 51 insertions(+), 59 deletions(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index 981ab7367202..eb1ad2fa242b 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -2,6 +2,7 @@ package machinelearning import ( "fmt" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/suppress" "time" "github.com/hashicorp/go-azure-helpers/lang/response" @@ -13,7 +14,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tags" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/tf/suppress" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -143,8 +143,37 @@ func resourceMachineLearningDataStoreCreateOrUpdate(d *pluginsdk.ResourceData, m Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), } - prop := expandBlobStorage(d) - datastoreRaw.Properties = prop + props := &datastore.AzureBlobDatastore{ + AccountName: utils.String(d.Get("storage_account_name").(string)), + ContainerName: utils.String(d.Get("container_name").(string)), + Description: utils.String(d.Get("description").(string)), + ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), + IsDefault: utils.Bool(d.Get("is_default").(bool)), + Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), + } + + accountKey := d.Get("account_key").(string) + if accountKey != "" { + 
props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeAccountKey), + "secrets": map[string]interface{}{ + "secretsType": "AccountKey", + "key": accountKey, + }, + } + } + + sasToken := d.Get("shared_access_signature").(string) + if sasToken != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeSas), + "secrets": map[string]interface{}{ + "secretsType": "Sas", + "sasToken": sasToken, + }, + } + } + datastoreRaw.Properties = props _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) if err != nil { @@ -163,7 +192,7 @@ func resourceMachineLearningDataStoreRead(d *pluginsdk.ResourceData, meta interf id, err := datastore.ParseDataStoreID(d.Id()) if err != nil { - return fmt.Errorf("parsing Machine Learning Data Store ID `%q`: %+v", d.Id(), err) + return err } resp, err := client.Get(ctx, *id) @@ -172,13 +201,20 @@ func resourceMachineLearningDataStoreRead(d *pluginsdk.ResourceData, meta interf d.SetId("") return nil } - return fmt.Errorf("making Read request on Machine Learning Data Store %q (Resource Group %q): %+v", id.Name, id.ResourceGroupName, err) + return fmt.Errorf("reading %s: %+v", *id, err) } workspaceId := workspaces.NewWorkspaceID(subscriptionId, id.ResourceGroupName, id.WorkspaceName) d.Set("name", resp.Model.Name) d.Set("workspace_id", workspaceId.ID()) - return flattenBlobStorage(d, resp.Model.Properties.(datastore.AzureBlobDatastore)) + + data := resp.Model.Properties.(datastore.AzureBlobDatastore) + d.Set("storage_account_name", data.AccountName) + d.Set("container_name", data.ContainerName) + d.Set("description", data.Description) + d.Set("is_default", data.IsDefault) + d.Set("service_data_auth_identity", string(*data.ServiceDataAccessAuthIdentity)) + return flattenAndSetTags(d, *data.Tags) } func resourceMachineLearningDataStoreDelete(d *pluginsdk.ResourceData, meta interface{}) error { @@ -188,60 +224,16 @@ func resourceMachineLearningDataStoreDelete(d *pluginsdk.ResourceData, meta inte id, err := datastore.ParseDataStoreID(d.Id()) if err != nil { - return fmt.Errorf("parsing Machine Learning Workspace Date Store ID `%q`: %+v", d.Id(), err) + return err } if _, err := client.Delete(ctx, *id); err != nil { - return fmt.Errorf("deleting Machine Learning Workspace Date Strore %q (Resource Group %q): %+v", id.Name, id.ResourceGroupName, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } return nil } -func expandBlobStorage(d *pluginsdk.ResourceData) *datastore.AzureBlobDatastore { - storeProps := &datastore.AzureBlobDatastore{ - AccountName: utils.String(d.Get("storage_account_name").(string)), - ContainerName: utils.String(d.Get("container_name").(string)), - Description: utils.String(d.Get("description").(string)), - ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), - IsDefault: utils.Bool(d.Get("is_default").(bool)), - Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), - } - - accountKey := d.Get("account_key").(string) - if accountKey != "" { - storeProps.Credentials = map[string]interface{}{ - "credentialsType": string(datastore.CredentialsTypeAccountKey), - "secrets": map[string]interface{}{ - "secretsType": "AccountKey", - "key": accountKey, - }, - } - } - - sasToken := d.Get("shared_access_signature").(string) - if sasToken != "" { - storeProps.Credentials = map[string]interface{}{ - "credentialsType": 
string(datastore.CredentialsTypeSas), - "secrets": map[string]interface{}{ - "secretsType": "Sas", - "sasToken": sasToken, - }, - } - } - - return storeProps -} - -func flattenBlobStorage(d *pluginsdk.ResourceData, data datastore.AzureBlobDatastore) error { - d.Set("description", data.Description) - d.Set("is_default", data.IsDefault) - d.Set("service_data_auth_identity", string(*data.ServiceDataAccessAuthIdentity)) - d.Set("storage_account_name", *data.AccountName) - d.Set("container_name", *data.ContainerName) - return flattenAndSetTags(d, *data.Tags) -} - func expandTags(tagsMap map[string]interface{}) map[string]string { output := make(map[string]string, len(tagsMap)) diff --git a/internal/services/machinelearning/registration.go b/internal/services/machinelearning/registration.go index 91e02c46e84d..5415407b15c8 100644 --- a/internal/services/machinelearning/registration.go +++ b/internal/services/machinelearning/registration.go @@ -36,9 +36,9 @@ func (r Registration) SupportedResources() map[string]*pluginsdk.Resource { return map[string]*pluginsdk.Resource{ "azurerm_machine_learning_compute_cluster": resourceComputeCluster(), "azurerm_machine_learning_compute_instance": resourceComputeInstance(), + "azurerm_machine_learning_datastore_blobstorage": resourceMachineLearningDataStore(), "azurerm_machine_learning_inference_cluster": resourceAksInferenceCluster(), "azurerm_machine_learning_synapse_spark": resourceSynapseSpark(), "azurerm_machine_learning_workspace": resourceMachineLearningWorkspace(), - "azurerm_machine_learning_datastore_blobstorage": resourceMachineLearningDataStore(), } } diff --git a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown index 24fe7fb09c12..dcd55dd9d2f6 100644 --- a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown +++ b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown @@ -81,20 +81,20 @@ The following arguments are supported: * `name` - (Required) The name of the Machine Learning DataStore. Changing this forces a new Machine Learning DataStore to be created. -* `workspace_id` - (Required) The ID of the machine learning workspace. Changing this forces a new Machine Learning DataStore to be created. +* `workspace_id` - (Required) The ID of the Machine Learning Workspace. Changing this forces a new Machine Learning DataStore to be created. --- -* `storage_account_name` - (Required) The name of the storage account. Changing this forces a new Machine Learning DataStore to be created. +* `storage_account_name` - (Required) The name of the Storage Account. Changing this forces a new Machine Learning DataStore to be created. -* `container_name` - (Required) The name of the storage account container. Changing this forces a new Machine Learning DataStore to be created. +* `container_name` - (Required) The name of the Storage Account Container. Changing this forces a new Machine Learning DataStore to be created. -* `account_key` - (Optional) The access key of the storage account. Conflicts with `shared_access_signature`. +* `account_key` - (Optional) The access key of the Storage Account. Conflicts with `shared_access_signature`. -* `shared_access_signature` - (Optional) The shared access signature of the storage account. Conflicts with `account_key`. +* `shared_access_signature` - (Optional) The Shared Access Signature of the Storage Account. Conflicts with `account_key`. * `description` - (Optional) Text used to describe the asset. 
Changing this forces a new Machine Learning DataStore to be created.
 
-* `is_default` - (Optional) A bool indicate if datastore is the workspace default datastore. Defaults to `false`.
+* `is_default` - (Optional) Specifies whether this Machine Learning DataStore is the default for the Workspace. Defaults to `false`.
 
 ~> **Note:** `is_default` can only be set to `true` on update.
 
From 18ac5d151274698a703ea41de54097f8acd747f5 Mon Sep 17 00:00:00 2001
From: xuzhang3 
Date: Wed, 11 Jan 2023 13:59:02 +0800
Subject: [PATCH 10/22] add nil check and format

---
 ...learning_datastore_blobstorage_resource.go | 28 ++++++++++++++++---
 ...ing_datastore_blobstorage_resource_test.go | 20 +++++++------
 2 files changed, 36 insertions(+), 12 deletions(-)

diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go
index eb1ad2fa242b..e011bce16ab6 100644
--- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go
+++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go
@@ -209,11 +209,31 @@ func resourceMachineLearningDataStoreRead(d *pluginsdk.ResourceData, meta interf
 	d.Set("workspace_id", workspaceId.ID())
 
 	data := resp.Model.Properties.(datastore.AzureBlobDatastore)
-	d.Set("storage_account_name", data.AccountName)
-	d.Set("container_name", data.ContainerName)
-	d.Set("description", data.Description)
+
+	serviceDataAuth := ""
+	if v := data.ServiceDataAccessAuthIdentity; v != nil {
+		serviceDataAuth = string(*v)
+	}
+	d.Set("service_data_auth_identity", serviceDataAuth)
+
+	storageAccountName := ""
+	if v := data.AccountName; v != nil {
+		storageAccountName = *v
+	}
+	d.Set("storage_account_name", storageAccountName)
+
+	containerName := ""
+	if v := data.ContainerName; v != nil {
+		containerName = *v
+	}
+	d.Set("container_name", containerName)
+
+	desc := ""
+	if v := data.Description; v != nil {
+		desc = *v
+	}
+	d.Set("description", desc)
 	d.Set("is_default", data.IsDefault)
-	d.Set("service_data_auth_identity", string(*data.ServiceDataAccessAuthIdentity))
 	return flattenAndSetTags(d, *data.Tags)
 }
 
diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go
index 20088759011d..ac339f2a9eb4 100644
--- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go
+++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go
@@ -114,10 +114,10 @@ resource "azurerm_storage_container" "test" {
 
 resource "azurerm_machine_learning_datastore_blobstorage" "test" {
   name = "accdatastore%[2]d"
-  workspace_id = azurerm_machine_learning_workspace.test.id
+  workspace_id         = azurerm_machine_learning_workspace.test.id
   storage_account_name = azurerm_storage_account.test.name
   container_name = azurerm_storage_container.test.name
-  account_key = azurerm_storage_account.test.primary_access_key
+  account_key          = azurerm_storage_account.test.primary_access_key
 }
 `, template, data.RandomInteger)
 }
 
@@ -169,14 +169,16 @@ data "azurerm_storage_account_sas" "test" {
 }
 
 resource "azurerm_machine_learning_datastore_blobstorage" "test" {
-  name = "accdatastore%[2]d"
-  workspace_id = azurerm_machine_learning_workspace.test.id
-  storage_account_name = azurerm_storage_account.test.name
-  container_name = azurerm_storage_container.test.name
+  name = "accdatastore%[2]d"
+ 
workspace_id = azurerm_machine_learning_workspace.test.id + storage_account_name = azurerm_storage_account.test.name + container_name = azurerm_storage_container.test.name shared_access_signature = data.azurerm_storage_account_sas.test.sas } + + `, template, data.RandomInteger) } @@ -184,16 +186,18 @@ func (r MachineLearningDataStoreBlobStorage) requiresImport(data acceptance.Test template := r.blobStorageAccountKey(data) return fmt.Sprintf(` + %s resource "azurerm_machine_learning_datastore_blobstorage" "import" { name = azurerm_machine_learning_datastore_blobstorage.test.name - workspace_id = azurerm_machine_learning_datastore_blobstorage.test.workspace_id + workspace_id = azurerm_machine_learning_datastore_blobstorage.test.workspace_id storage_account_name = azurerm_machine_learning_datastore_blobstorage.test.storage_account_name container_name = azurerm_machine_learning_datastore_blobstorage.test.container_name - account_key = azurerm_machine_learning_datastore_blobstorage.test.account_key + account_key = azurerm_machine_learning_datastore_blobstorage.test.account_key } + `, template) } From c7a3ed12fa07e5930f4781f44ab2fe7da37aebb2 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Wed, 11 Jan 2023 14:07:50 +0800 Subject: [PATCH 11/22] update doc --- .../r/machine_learning_datastore_blogstorage.html.markdown | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown index dcd55dd9d2f6..b044b9e3ebc4 100644 --- a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown +++ b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown @@ -96,9 +96,9 @@ The following arguments are supported: * `is_default` - (Optional) Specifies whether this Machines Learning DataStore is the default for the Workspace. Defaults to `false`. -~> **Note:** `is_default` can only be set to `true` on update. +~> **Note:** `is_default` can only be set to `true` on update. -* `service_data_auth_identity` - (Optional) Indicates which identity to use to authenticate service data access to customer's storage. Defaults to `None`. Possible values are `None`, `WorkspaceSystemAssignedIdentity` and `WorkspaceUserAssignedIdentity`. +* `service_data_auth_identity` - (Optional) Specifies which identity to use when retrieving data from the specified source. Defaults to `None`. Possible values are `None`, `WorkspaceSystemAssignedIdentity` and `WorkspaceUserAssignedIdentity`. * `tags` - (Optional) A mapping of tags which should be assigned to the Machine Learning DataStore. Changing this forces a new Machine Learning DataStore to be created. 
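The documentation wording above describes the blob storage datastore arguments as they stand at this point in the series (the resource still takes `storage_account_name` and `container_name`; a later patch replaces these with `storage_container_id`). The following is a minimal configuration sketch only, not part of any patch, and it assumes the referenced workspace, storage account and container are declared elsewhere in the configuration:

```hcl
# Illustrative sketch only; azurerm_machine_learning_workspace.example,
# azurerm_storage_account.example and azurerm_storage_container.example are
# assumed to be defined elsewhere in the configuration.
resource "azurerm_machine_learning_datastore_blobstorage" "example" {
  name                 = "exampledatastore"
  workspace_id         = azurerm_machine_learning_workspace.example.id
  storage_account_name = azurerm_storage_account.example.name
  container_name       = azurerm_storage_container.example.name
  account_key          = azurerm_storage_account.example.primary_access_key

  # Which identity the service uses when it retrieves data from the container;
  # `None` (the default) relies on the credential supplied above.
  service_data_auth_identity = "WorkspaceSystemAssignedIdentity"

  tags = {
    environment = "example"
  }
}
```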
From f3f577f5be39aaa4894d8c47cf01ddd5e3ef9703 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Wed, 11 Jan 2023 14:08:33 +0800 Subject: [PATCH 12/22] update doc --- .../machine_learning_datastore_blobstorage_resource.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index e011bce16ab6..7cf82820464a 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -2,7 +2,6 @@ package machinelearning import ( "fmt" - "github.com/hashicorp/terraform-provider-azurerm/internal/tf/suppress" "time" "github.com/hashicorp/go-azure-helpers/lang/response" @@ -14,6 +13,7 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tags" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" + "github.com/hashicorp/terraform-provider-azurerm/internal/tf/suppress" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" "github.com/hashicorp/terraform-provider-azurerm/utils" From 4bd02bab16eb47612bd710ee2b16c9038631bf55 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Thu, 12 Jan 2023 13:06:51 +0800 Subject: [PATCH 13/22] split create/update method --- ...learning_datastore_blobstorage_resource.go | 65 ++++++++++++++++++- 1 file changed, 62 insertions(+), 3 deletions(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index 7cf82820464a..e4e40d2da866 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -21,9 +21,9 @@ import ( func resourceMachineLearningDataStore() *pluginsdk.Resource { resource := &pluginsdk.Resource{ - Create: resourceMachineLearningDataStoreCreateOrUpdate, + Create: resourceMachineLearningDataStoreCreate, Read: resourceMachineLearningDataStoreRead, - Update: resourceMachineLearningDataStoreCreateOrUpdate, + Update: resourceMachineLearningDataStoreUpdate, Delete: resourceMachineLearningDataStoreDelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { @@ -114,7 +114,7 @@ func resourceMachineLearningDataStore() *pluginsdk.Resource { return resource } -func resourceMachineLearningDataStoreCreateOrUpdate(d *pluginsdk.ResourceData, meta interface{}) error { +func resourceMachineLearningDataStoreCreate(d *pluginsdk.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MachineLearning.DatastoreClient subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -184,6 +184,65 @@ func resourceMachineLearningDataStoreCreateOrUpdate(d *pluginsdk.ResourceData, m return resourceMachineLearningDataStoreRead(d, meta) } +func resourceMachineLearningDataStoreUpdate(d *pluginsdk.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).MachineLearning.DatastoreClient + subscriptionId := meta.(*clients.Client).Account.SubscriptionId + ctx, cancel := 
timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string)) + if err != nil { + return err + } + + id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) + + datastoreRaw := datastore.DatastoreResource{ + Name: utils.String(id.Name), + Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), + } + + props := &datastore.AzureBlobDatastore{ + AccountName: utils.String(d.Get("storage_account_name").(string)), + ContainerName: utils.String(d.Get("container_name").(string)), + Description: utils.String(d.Get("description").(string)), + ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), + IsDefault: utils.Bool(d.Get("is_default").(bool)), + Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), + } + + accountKey := d.Get("account_key").(string) + if accountKey != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeAccountKey), + "secrets": map[string]interface{}{ + "secretsType": "AccountKey", + "key": accountKey, + }, + } + } + + sasToken := d.Get("shared_access_signature").(string) + if sasToken != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeSas), + "secrets": map[string]interface{}{ + "secretsType": "Sas", + "sasToken": sasToken, + }, + } + } + datastoreRaw.Properties = props + + _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) + if err != nil { + return fmt.Errorf("creating/updating %s: %+v", id, err) + } + + d.SetId(id.ID()) + return resourceMachineLearningDataStoreRead(d, meta) +} + func resourceMachineLearningDataStoreRead(d *pluginsdk.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MachineLearning.DatastoreClient subscriptionId := meta.(*clients.Client).Account.SubscriptionId From 9085d924fc74587c252de4a04d181c2f1f30e2b3 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Thu, 12 Jan 2023 15:15:09 +0800 Subject: [PATCH 14/22] replace storage account name with resource id --- ...learning_datastore_blobstorage_resource.go | 43 ++++++++----------- ...ing_datastore_blobstorage_resource_test.go | 15 ++----- ...arning_datastore_blogstorage.html.markdown | 7 +-- 3 files changed, 23 insertions(+), 42 deletions(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index e4e40d2da866..6d15d75354aa 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -11,9 +11,9 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate" + storageparse "github.com/hashicorp/terraform-provider-azurerm/internal/services/storage/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tags" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" - "github.com/hashicorp/terraform-provider-azurerm/internal/tf/suppress" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" 
"github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" "github.com/hashicorp/terraform-provider-azurerm/utils" @@ -53,15 +53,7 @@ func resourceMachineLearningDataStore() *pluginsdk.Resource { ValidateFunc: validate.WorkspaceID, }, - "storage_account_name": { - Type: pluginsdk.TypeString, - Required: true, - ForceNew: true, - DiffSuppressFunc: suppress.CaseDifference, - ValidateFunc: validation.StringIsNotEmpty, - }, - - "container_name": { + "storage_container_id": { Type: pluginsdk.TypeString, Required: true, ForceNew: true, @@ -138,14 +130,19 @@ func resourceMachineLearningDataStoreCreate(d *pluginsdk.ResourceData, meta inte } } + containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string)) + if err != nil { + return err + } + datastoreRaw := datastore.DatastoreResource{ Name: utils.String(d.Get("name").(string)), Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), } props := &datastore.AzureBlobDatastore{ - AccountName: utils.String(d.Get("storage_account_name").(string)), - ContainerName: utils.String(d.Get("container_name").(string)), + AccountName: utils.String(containerId.StorageAccountName), + ContainerName: utils.String(containerId.ContainerName), Description: utils.String(d.Get("description").(string)), ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), IsDefault: utils.Bool(d.Get("is_default").(bool)), @@ -197,14 +194,19 @@ func resourceMachineLearningDataStoreUpdate(d *pluginsdk.ResourceData, meta inte id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) + containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string)) + if err != nil { + return err + } + datastoreRaw := datastore.DatastoreResource{ Name: utils.String(id.Name), Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), } props := &datastore.AzureBlobDatastore{ - AccountName: utils.String(d.Get("storage_account_name").(string)), - ContainerName: utils.String(d.Get("container_name").(string)), + AccountName: utils.String(containerId.StorageAccountName), + ContainerName: utils.String(containerId.ContainerName), Description: utils.String(d.Get("description").(string)), ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), IsDefault: utils.Bool(d.Get("is_default").(bool)), @@ -275,17 +277,8 @@ func resourceMachineLearningDataStoreRead(d *pluginsdk.ResourceData, meta interf } d.Set("service_data_auth_identity", serviceDataAuth) - storageAccountName := "" - if v := data.AccountName; v != nil { - storageAccountName = *v - } - d.Set("storage_account_name", storageAccountName) - - containerName := "" - if v := data.ContainerName; v != nil { - containerName = *v - } - d.Set("container_name", containerName) + containerId := storageparse.NewStorageContainerResourceManagerID(subscriptionId, workspaceId.ResourceGroupName, *data.AccountName, "default", *data.ContainerName) + d.Set("storage_container_id", containerId.ID()) desc := "" if v := data.Description; v != nil { diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go index ac339f2a9eb4..ea37bb12e071 100644 --- 
a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource_test.go @@ -115,8 +115,7 @@ resource "azurerm_storage_container" "test" { resource "azurerm_machine_learning_datastore_blobstorage" "test" { name = "accdatastore%[2]d" workspace_id = azurerm_machine_learning_workspace.test.id - storage_account_name = azurerm_storage_account.test.name - container_name = azurerm_storage_container.test.name + storage_container_id = azurerm_storage_container.test.resource_manager_id account_key = azurerm_storage_account.test.primary_access_key } `, template, data.RandomInteger) @@ -171,14 +170,9 @@ data "azurerm_storage_account_sas" "test" { resource "azurerm_machine_learning_datastore_blobstorage" "test" { name = "accdatastore%[2]d" workspace_id = azurerm_machine_learning_workspace.test.id - storage_account_name = azurerm_storage_account.test.name - container_name = azurerm_storage_container.test.name + storage_container_id = azurerm_storage_container.test.resource_manager_id shared_access_signature = data.azurerm_storage_account_sas.test.sas } - - - - `, template, data.RandomInteger) } @@ -192,12 +186,9 @@ func (r MachineLearningDataStoreBlobStorage) requiresImport(data acceptance.Test resource "azurerm_machine_learning_datastore_blobstorage" "import" { name = azurerm_machine_learning_datastore_blobstorage.test.name workspace_id = azurerm_machine_learning_datastore_blobstorage.test.workspace_id - storage_account_name = azurerm_machine_learning_datastore_blobstorage.test.storage_account_name - container_name = azurerm_machine_learning_datastore_blobstorage.test.container_name + storage_container_id = azurerm_machine_learning_datastore_blobstorage.test.storage_container_id account_key = azurerm_machine_learning_datastore_blobstorage.test.account_key } - - `, template) } diff --git a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown index b044b9e3ebc4..7fc0edae3b67 100644 --- a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown +++ b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown @@ -69,8 +69,7 @@ resource "azurerm_storage_container" "example" { resource "azurerm_machine_learning_datastore_blobstorage" "example" { name = "example-datastore" workspace_id = azurerm_machine_learning_workspace.example.id - storage_account_name = azurerm_storage_account.example.name - container_name = azurerm_storage_container.example.name + storage_container_id = azurerm_storage_account.example.resource_manager_id account_key = azurerm_storage_account.example.primary_access_key } ``` @@ -84,9 +83,7 @@ The following arguments are supported: * `workspace_id` - (Required) The ID of the Machine Learning Workspace. Changing this forces a new Machine Learning DataStore to be created. --- -* `storage_account_name` - (Required) The name of the Storage Account. Changing this forces a new Machine Learning DataStore to be created. - -* `container_name` - (Required) The name of the Storage Account Container. Changing this forces a new Machine Learning DataStore to be created. +* `storage_container_id` - (Required) The ID of the Storage Account Container. Changing this forces a new Machine Learning DataStore to be created. * `account_key` - (Optional) The access key of the Storage Account. Conflicts with `shared_access_signature`. 
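
The documentation and test changes above collapse the separate storage account and container arguments into a single `storage_container_id`. As a minimal sketch of the resulting configuration, assuming the `example` workspace, storage account and container resources from the Example Usage section are already defined: the datastore now points at the container's Resource Manager ID (the same attribute the acceptance tests reference), which the provider parses back into an account name and container name.

```hcl
# Minimal sketch: blob-storage datastore referencing the container's Resource Manager ID.
# Assumes azurerm_machine_learning_workspace.example, azurerm_storage_account.example and
# azurerm_storage_container.example exist as in the Example Usage section of the docs.
resource "azurerm_machine_learning_datastore_blobstorage" "example" {
  name                 = "example-datastore"
  workspace_id         = azurerm_machine_learning_workspace.example.id
  storage_container_id = azurerm_storage_container.example.resource_manager_id
  account_key          = azurerm_storage_account.example.primary_access_key
}
```
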
From 23031a1f94121f503877a4615ce7c4bc6cf8cfbd Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Thu, 12 Jan 2023 15:42:24 +0800 Subject: [PATCH 15/22] rename --- ...learning_datastore_blobstorage_resource.go | 22 +++++++++---------- .../services/machinelearning/registration.go | 2 +- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index 6d15d75354aa..6abdf9f778a0 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -19,12 +19,12 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/utils" ) -func resourceMachineLearningDataStore() *pluginsdk.Resource { +func resourceMachineLearningDataStoreBlobStorage() *pluginsdk.Resource { resource := &pluginsdk.Resource{ - Create: resourceMachineLearningDataStoreCreate, - Read: resourceMachineLearningDataStoreRead, - Update: resourceMachineLearningDataStoreUpdate, - Delete: resourceMachineLearningDataStoreDelete, + Create: resourceMachineLearningDataStoreBlobStorageCreate, + Read: resourceMachineLearningDataStoreBlobStorageRead, + Update: resourceMachineLearningDataStoreBlobStorageUpdate, + Delete: resourceMachineLearningDataStoreBlobStorageDelete, Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { _, err := datastore.ParseDataStoreID(id) @@ -106,7 +106,7 @@ func resourceMachineLearningDataStore() *pluginsdk.Resource { return resource } -func resourceMachineLearningDataStoreCreate(d *pluginsdk.ResourceData, meta interface{}) error { +func resourceMachineLearningDataStoreBlobStorageCreate(d *pluginsdk.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MachineLearning.DatastoreClient subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -178,10 +178,10 @@ func resourceMachineLearningDataStoreCreate(d *pluginsdk.ResourceData, meta inte } d.SetId(id.ID()) - return resourceMachineLearningDataStoreRead(d, meta) + return resourceMachineLearningDataStoreBlobStorageRead(d, meta) } -func resourceMachineLearningDataStoreUpdate(d *pluginsdk.ResourceData, meta interface{}) error { +func resourceMachineLearningDataStoreBlobStorageUpdate(d *pluginsdk.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MachineLearning.DatastoreClient subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) @@ -242,10 +242,10 @@ func resourceMachineLearningDataStoreUpdate(d *pluginsdk.ResourceData, meta inte } d.SetId(id.ID()) - return resourceMachineLearningDataStoreRead(d, meta) + return resourceMachineLearningDataStoreBlobStorageRead(d, meta) } -func resourceMachineLearningDataStoreRead(d *pluginsdk.ResourceData, meta interface{}) error { +func resourceMachineLearningDataStoreBlobStorageRead(d *pluginsdk.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MachineLearning.DatastoreClient subscriptionId := meta.(*clients.Client).Account.SubscriptionId ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) @@ -289,7 +289,7 @@ func resourceMachineLearningDataStoreRead(d *pluginsdk.ResourceData, meta interf return flattenAndSetTags(d, *data.Tags) } -func resourceMachineLearningDataStoreDelete(d 
*pluginsdk.ResourceData, meta interface{}) error { +func resourceMachineLearningDataStoreBlobStorageDelete(d *pluginsdk.ResourceData, meta interface{}) error { client := meta.(*clients.Client).MachineLearning.DatastoreClient ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) defer cancel() diff --git a/internal/services/machinelearning/registration.go b/internal/services/machinelearning/registration.go index 5415407b15c8..02fe4b10cb56 100644 --- a/internal/services/machinelearning/registration.go +++ b/internal/services/machinelearning/registration.go @@ -36,7 +36,7 @@ func (r Registration) SupportedResources() map[string]*pluginsdk.Resource { return map[string]*pluginsdk.Resource{ "azurerm_machine_learning_compute_cluster": resourceComputeCluster(), "azurerm_machine_learning_compute_instance": resourceComputeInstance(), - "azurerm_machine_learning_datastore_blobstorage": resourceMachineLearningDataStore(), + "azurerm_machine_learning_datastore_blobstorage": resourceMachineLearningDataStoreBlobStorage(), "azurerm_machine_learning_inference_cluster": resourceAksInferenceCluster(), "azurerm_machine_learning_synapse_spark": resourceSynapseSpark(), "azurerm_machine_learning_workspace": resourceMachineLearningWorkspace(), From 25eb9751fa7d1c9b40c219b787d1887474b59cea Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Mon, 16 Jan 2023 17:22:17 +0800 Subject: [PATCH 16/22] update doc --- .../docs/r/machine_learning_datastore_blogstorage.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown index 7fc0edae3b67..a70392a42f07 100644 --- a/website/docs/r/machine_learning_datastore_blogstorage.html.markdown +++ b/website/docs/r/machine_learning_datastore_blogstorage.html.markdown @@ -82,9 +82,9 @@ The following arguments are supported: * `workspace_id` - (Required) The ID of the Machine Learning Workspace. Changing this forces a new Machine Learning DataStore to be created. ---- * `storage_container_id` - (Required) The ID of the Storage Account Container. Changing this forces a new Machine Learning DataStore to be created. +--- * `account_key` - (Optional) The access key of the Storage Account. Conflicts with `shared_access_signature`. * `shared_access_signature` - (Optional) The Shared Access Signature of the Storage Account. Conflicts with `account_key`. From e694017a7db9e3a8ebcea2b6e32a2bfce792df34 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Mon, 16 Jan 2023 17:24:17 +0800 Subject: [PATCH 17/22] update doc --- .../docs/r/machine_learning_datastore_fileshare.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/r/machine_learning_datastore_fileshare.html.markdown b/website/docs/r/machine_learning_datastore_fileshare.html.markdown index ee6bdb3147f9..d28626811169 100644 --- a/website/docs/r/machine_learning_datastore_fileshare.html.markdown +++ b/website/docs/r/machine_learning_datastore_fileshare.html.markdown @@ -82,9 +82,9 @@ The following arguments are supported: * `workspace_id` - (Required) The ID of the Machine Learning Workspace. Changing this forces a new Machine Learning DataStore to be created. ---- * `storage_fileshare_id` - (Required) The ID of the Storage Account File Share. Changing this forces a new Machine Learning DataStore to be created. +--- * `account_key` - (Optional) The access key of the Storage Account. Conflicts with `shared_access_signature`. 
* `shared_access_signature` - (Optional) The Shared Access Signature of the Storage Account. Conflicts with `account_key`. From 3ddcc4b7ad297de3a6f66711b8b897ad67c3fc2c Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Wed, 18 Jan 2023 12:40:08 +0800 Subject: [PATCH 18/22] refactor to typed resource --- internal/provider/services.go | 1 + ...learning_datastore_blobstorage_resource.go | 850 ++++++++++++------ .../services/machinelearning/registration.go | 24 +- 3 files changed, 611 insertions(+), 264 deletions(-) diff --git a/internal/provider/services.go b/internal/provider/services.go index 78b8a2bbbd68..37c68624ad5b 100644 --- a/internal/provider/services.go +++ b/internal/provider/services.go @@ -143,6 +143,7 @@ func SupportedTypedServices() []sdk.TypedServiceRegistration { labservice.Registration{}, loadbalancer.Registration{}, loganalytics.Registration{}, + machinelearning.Registration{}, monitor.Registration{}, mssql.Registration{}, network.Registration{}, diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index 6abdf9f778a0..427395c820d5 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -1,7 +1,10 @@ package machinelearning import ( + "context" "fmt" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" "time" "github.com/hashicorp/go-azure-helpers/lang/response" @@ -9,324 +12,655 @@ import ( "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/workspaces" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" - "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate" storageparse "github.com/hashicorp/terraform-provider-azurerm/internal/services/storage/parse" - "github.com/hashicorp/terraform-provider-azurerm/internal/tags" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" "github.com/hashicorp/terraform-provider-azurerm/utils" ) -func resourceMachineLearningDataStoreBlobStorage() *pluginsdk.Resource { - resource := &pluginsdk.Resource{ - Create: resourceMachineLearningDataStoreBlobStorageCreate, - Read: resourceMachineLearningDataStoreBlobStorageRead, - Update: resourceMachineLearningDataStoreBlobStorageUpdate, - Delete: resourceMachineLearningDataStoreBlobStorageDelete, - - Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { - _, err := datastore.ParseDataStoreID(id) - return err - }), - - Timeouts: &pluginsdk.ResourceTimeout{ - Create: pluginsdk.DefaultTimeout(30 * time.Minute), - Read: pluginsdk.DefaultTimeout(5 * time.Minute), - Update: pluginsdk.DefaultTimeout(30 * time.Minute), - Delete: pluginsdk.DefaultTimeout(30 * time.Minute), - }, +type MachineLearningDataStoreBlobStorage struct{} + +type MachineLearningDataStoreBlobStorageModel struct { + Name string `tfschema:"name"` + WorkSpaceID string `tfschema:"workspace_id"` + StorageContainerID string `tfschema:"storage_container_id"` + Description string 
`tfschema:"description"` + IsDefault bool `tfschema:"is_default"` + ServiceDataAuthIdentity string `tfschema:"service_data_auth_identity"` + AccountKey string `tfschema:"account_key"` + SharedAccessSignature string `tfschema:"shared_access_signature"` + Tags map[string]string `tfschema:"tags"` +} - Schema: map[string]*pluginsdk.Schema{ - "name": { - Type: pluginsdk.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.DataStoreName, - }, +func (r MachineLearningDataStoreBlobStorage) Attributes() map[string]*schema.Schema { + return nil +} - "workspace_id": { - Type: pluginsdk.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validate.WorkspaceID, - }, +func (r MachineLearningDataStoreBlobStorage) ModelObject() interface{} { + return &MachineLearningDataStoreBlobStorageModel{} +} - "storage_container_id": { - Type: pluginsdk.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringIsNotEmpty, - }, +func (r MachineLearningDataStoreBlobStorage) ResourceType() string { + return "azurerm_machine_learning_datastore_blobstorage" +} - "description": { - Type: pluginsdk.TypeString, - Optional: true, - ForceNew: true, - }, +func (r MachineLearningDataStoreBlobStorage) IDValidationFunc() pluginsdk.SchemaValidateFunc { + return datastore.ValidateDataStoreID +} - "is_default": { - Type: pluginsdk.TypeBool, - Optional: true, - Default: false, - }, +var _ sdk.ResourceWithUpdate = MachineLearningDataStoreBlobStorage{} - "service_data_auth_identity": { - Type: pluginsdk.TypeString, - Optional: true, - ValidateFunc: validation.StringInSlice([]string{ - string(datastore.ServiceDataAccessAuthIdentityNone), - string(datastore.ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity), - string(datastore.ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity), - }, - false), - Default: string(datastore.ServiceDataAccessAuthIdentityNone), - }, +func (r MachineLearningDataStoreBlobStorage) Arguments() map[string]*pluginsdk.Schema { + return map[string]*pluginsdk.Schema{ + "name": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataStoreName, + }, - "account_key": { - Type: pluginsdk.TypeString, - Optional: true, - Sensitive: true, - ValidateFunc: validation.StringIsNotEmpty, - ExactlyOneOf: []string{"account_key", "shared_access_signature"}, - }, + "workspace_id": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.WorkspaceID, + }, + + "storage_container_id": { + Type: pluginsdk.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringIsNotEmpty, + }, + + "description": { + Type: pluginsdk.TypeString, + Optional: true, + ForceNew: true, + }, + + "is_default": { + Type: pluginsdk.TypeBool, + Optional: true, + Default: false, + }, - "shared_access_signature": { - Type: pluginsdk.TypeString, - Optional: true, - Sensitive: true, - ValidateFunc: validation.StringIsNotEmpty, - AtLeastOneOf: []string{"account_key", "shared_access_signature"}, + "service_data_auth_identity": { + Type: pluginsdk.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datastore.ServiceDataAccessAuthIdentityNone), + string(datastore.ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity), + string(datastore.ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity), }, + false), + Default: string(datastore.ServiceDataAccessAuthIdentityNone), + }, + + "account_key": { + Type: pluginsdk.TypeString, + Optional: true, + Sensitive: 
true, + ValidateFunc: validation.StringIsNotEmpty, + ExactlyOneOf: []string{"account_key", "shared_access_signature"}, + }, - "tags": commonschema.TagsForceNew(), + "shared_access_signature": { + Type: pluginsdk.TypeString, + Optional: true, + Sensitive: true, + ValidateFunc: validation.StringIsNotEmpty, + AtLeastOneOf: []string{"account_key", "shared_access_signature"}, }, + + "tags": commonschema.TagsForceNew(), } - return resource } -func resourceMachineLearningDataStoreBlobStorageCreate(d *pluginsdk.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).MachineLearning.DatastoreClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() +func (r MachineLearningDataStoreBlobStorage) Create() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 30 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.MachineLearning.DatastoreClient + subscriptionId := metadata.Client.Account.SubscriptionId - workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string)) - if err != nil { - return err - } + var model MachineLearningDataStoreBlobStorageModel + if err := metadata.Decode(&model); err != nil { + return fmt.Errorf("decoding %+v", err) + } - id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) - if d.IsNewResource() { - existing, err := client.Get(ctx, id) - if err != nil { - if !response.WasNotFound(existing.HttpResponse) { - return fmt.Errorf("checking for presence of existing %s: %+v", id, err) + workspaceId, err := workspaces.ParseWorkspaceID(model.WorkSpaceID) + if err != nil { + return err } - } - if !response.WasNotFound(existing.HttpResponse) { - return tf.ImportAsExistsError("azurerm_machine_learning_datastore_blobstorage", id.ID()) - } - } - containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string)) - if err != nil { - return err - } + id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, model.Name) - datastoreRaw := datastore.DatastoreResource{ - Name: utils.String(d.Get("name").(string)), - Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), - } + existing, err := client.Get(ctx, id) + if err != nil { + if !response.WasNotFound(existing.HttpResponse) { + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) + } + } + if !response.WasNotFound(existing.HttpResponse) { + return tf.ImportAsExistsError("azurerm_machine_learning_datastore_blobstorage", id.ID()) + } - props := &datastore.AzureBlobDatastore{ - AccountName: utils.String(containerId.StorageAccountName), - ContainerName: utils.String(containerId.ContainerName), - Description: utils.String(d.Get("description").(string)), - ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), - IsDefault: utils.Bool(d.Get("is_default").(bool)), - Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), - } + containerId, err := storageparse.StorageContainerResourceManagerID(model.StorageContainerID) + if err != nil { + return err + } - accountKey := d.Get("account_key").(string) - if accountKey != "" { - props.Credentials = map[string]interface{}{ - "credentialsType": string(datastore.CredentialsTypeAccountKey), - "secrets": map[string]interface{}{ - 
"secretsType": "AccountKey", - "key": accountKey, - }, - } - } + datastoreRaw := datastore.DatastoreResource{ + Name: utils.String(model.Name), + Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), + } - sasToken := d.Get("shared_access_signature").(string) - if sasToken != "" { - props.Credentials = map[string]interface{}{ - "credentialsType": string(datastore.CredentialsTypeSas), - "secrets": map[string]interface{}{ - "secretsType": "Sas", - "sasToken": sasToken, - }, - } - } - datastoreRaw.Properties = props + props := &datastore.AzureBlobDatastore{ + AccountName: utils.String(containerId.StorageAccountName), + ContainerName: utils.String(containerId.ContainerName), + Description: utils.String(model.Description), + ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(model.ServiceDataAuthIdentity)), + IsDefault: utils.Bool(model.IsDefault), + Tags: utils.ToPtr(model.Tags), + } - _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) - if err != nil { - return fmt.Errorf("creating/updating %s: %+v", id, err) - } + accountKey := model.AccountKey + if accountKey != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeAccountKey), + "secrets": map[string]interface{}{ + "secretsType": "AccountKey", + "key": accountKey, + }, + } + } - d.SetId(id.ID()) - return resourceMachineLearningDataStoreBlobStorageRead(d, meta) -} + sasToken := model.SharedAccessSignature + if sasToken != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeSas), + "secrets": map[string]interface{}{ + "secretsType": "Sas", + "sasToken": sasToken, + }, + } + } + datastoreRaw.Properties = props -func resourceMachineLearningDataStoreBlobStorageUpdate(d *pluginsdk.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).MachineLearning.DatastoreClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) - defer cancel() + _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) + if err != nil { + return fmt.Errorf("creating %s: %+v", id, err) + } - workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string)) - if err != nil { - return err + metadata.SetID(id) + return nil + }, } +} - id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) - - containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string)) - if err != nil { - return err - } +func (r MachineLearningDataStoreBlobStorage) Update() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 30 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.MachineLearning.DatastoreClient - datastoreRaw := datastore.DatastoreResource{ - Name: utils.String(id.Name), - Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), - } + id, err := datastore.ParseDataStoreID(metadata.ResourceData.Id()) + if err != nil { + return err + } - props := &datastore.AzureBlobDatastore{ - AccountName: utils.String(containerId.StorageAccountName), - ContainerName: utils.String(containerId.ContainerName), - Description: utils.String(d.Get("description").(string)), - ServiceDataAccessAuthIdentity: 
utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), - IsDefault: utils.Bool(d.Get("is_default").(bool)), - Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), - } + var state MachineLearningDataStoreBlobStorageModel + if err := metadata.Decode(&state); err != nil { + return err + } - accountKey := d.Get("account_key").(string) - if accountKey != "" { - props.Credentials = map[string]interface{}{ - "credentialsType": string(datastore.CredentialsTypeAccountKey), - "secrets": map[string]interface{}{ - "secretsType": "AccountKey", - "key": accountKey, - }, - } - } + containerId, err := storageparse.StorageContainerResourceManagerID(state.StorageContainerID) + if err != nil { + return err + } - sasToken := d.Get("shared_access_signature").(string) - if sasToken != "" { - props.Credentials = map[string]interface{}{ - "credentialsType": string(datastore.CredentialsTypeSas), - "secrets": map[string]interface{}{ - "secretsType": "Sas", - "sasToken": sasToken, - }, - } - } - datastoreRaw.Properties = props + datastoreRaw := datastore.DatastoreResource{ + Name: utils.String(id.Name), + Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), + } - _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) - if err != nil { - return fmt.Errorf("creating/updating %s: %+v", id, err) - } + props := &datastore.AzureBlobDatastore{ + AccountName: utils.String(containerId.StorageAccountName), + ContainerName: utils.String(containerId.ContainerName), + Description: utils.String(state.Description), + ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(state.ServiceDataAuthIdentity)), + IsDefault: utils.Bool(state.IsDefault), + Tags: utils.ToPtr(state.Tags), + } - d.SetId(id.ID()) - return resourceMachineLearningDataStoreBlobStorageRead(d, meta) -} + accountKey := state.AccountKey + if accountKey != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeAccountKey), + "secrets": map[string]interface{}{ + "secretsType": "AccountKey", + "key": accountKey, + }, + } + } -func resourceMachineLearningDataStoreBlobStorageRead(d *pluginsdk.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).MachineLearning.DatastoreClient - subscriptionId := meta.(*clients.Client).Account.SubscriptionId - ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) - defer cancel() + sasToken := state.SharedAccessSignature + if sasToken != "" { + props.Credentials = map[string]interface{}{ + "credentialsType": string(datastore.CredentialsTypeSas), + "secrets": map[string]interface{}{ + "secretsType": "Sas", + "sasToken": sasToken, + }, + } + } + datastoreRaw.Properties = props - id, err := datastore.ParseDataStoreID(d.Id()) - if err != nil { - return err - } + _, err = client.CreateOrUpdate(ctx, *id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) + if err != nil { + return fmt.Errorf("updating %s: %+v", id, err) + } - resp, err := client.Get(ctx, *id) - if err != nil { - if response.WasNotFound(resp.HttpResponse) { - d.SetId("") return nil - } - return fmt.Errorf("reading %s: %+v", *id, err) + }, } +} - workspaceId := workspaces.NewWorkspaceID(subscriptionId, id.ResourceGroupName, id.WorkspaceName) - d.Set("name", resp.Model.Name) - d.Set("workspace_id", workspaceId.ID()) - - data := resp.Model.Properties.(datastore.AzureBlobDatastore) +func (r MachineLearningDataStoreBlobStorage) Read() 
sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 5 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.MachineLearning.DatastoreClient + subscriptionId := metadata.Client.Account.SubscriptionId - serviceDataAuth := "" - if v := data.ServiceDataAccessAuthIdentity; v != nil { - serviceDataAuth = string(*v) - } - d.Set("service_data_auth_identity", serviceDataAuth) + id, err := datastore.ParseDataStoreID(metadata.ResourceData.Id()) + if err != nil { + return err + } - containerId := storageparse.NewStorageContainerResourceManagerID(subscriptionId, workspaceId.ResourceGroupName, *data.AccountName, "default", *data.ContainerName) - d.Set("storage_container_id", containerId.ID()) + resp, err := client.Get(ctx, *id) + if err != nil { + if response.WasNotFound(resp.HttpResponse) { + return metadata.MarkAsGone(id) + } + return fmt.Errorf("reading %s: %+v", *id, err) + } - desc := "" - if v := data.Description; v != nil { - d.Set("description", desc) - } + workspaceId := workspaces.NewWorkspaceID(subscriptionId, id.ResourceGroupName, id.WorkspaceName) + model := MachineLearningDataStoreBlobStorageModel{ + Name: *resp.Model.Name, + WorkSpaceID: workspaceId.ID(), + } - d.Set("is_default", data.IsDefault) - return flattenAndSetTags(d, *data.Tags) -} + data := resp.Model.Properties.(datastore.AzureBlobDatastore) + serviceDataAuth := "" + if v := data.ServiceDataAccessAuthIdentity; v != nil { + serviceDataAuth = string(*v) + } + model.ServiceDataAuthIdentity = serviceDataAuth -func resourceMachineLearningDataStoreBlobStorageDelete(d *pluginsdk.ResourceData, meta interface{}) error { - client := meta.(*clients.Client).MachineLearning.DatastoreClient - ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) - defer cancel() + containerId := storageparse.NewStorageContainerResourceManagerID(subscriptionId, workspaceId.ResourceGroupName, *data.AccountName, "default", *data.ContainerName) + model.StorageContainerID = containerId.ID() + model.IsDefault = *data.IsDefault - id, err := datastore.ParseDataStoreID(d.Id()) - if err != nil { - return err - } + if v, ok := metadata.ResourceData.GetOk("account_key"); ok { + if v.(string) != "" { + model.AccountKey = v.(string) + } + } - if _, err := client.Delete(ctx, *id); err != nil { - return fmt.Errorf("deleting %s: %+v", *id, err) - } + if v, ok := metadata.ResourceData.GetOk("shared_access_signature"); ok { + if v.(string) != "" { + model.SharedAccessSignature = v.(string) + } + } - return nil -} + desc := "" + if v := data.Description; v != nil { + desc = *v + } + model.Description = desc -func expandTags(tagsMap map[string]interface{}) map[string]string { - output := make(map[string]string, len(tagsMap)) + if data.Tags != nil { + model.Tags = *data.Tags + } - for i, v := range tagsMap { - // Validate should have ignored this error already - value, _ := tags.TagValueToString(v) - output[i] = value + return metadata.Encode(&model) + }, } - - return output } -func flattenAndSetTags(d *pluginsdk.ResourceData, tagMap map[string]string) error { - output := make(map[string]interface{}, len(tagMap)) - for i, v := range tagMap { - output[i] = v - } +func (r MachineLearningDataStoreBlobStorage) Delete() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 30 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.MachineLearning.DatastoreClient - if err := d.Set("tags", output); err != nil { - return fmt.Errorf("setting 
`tags`: %s", err) - } + id, err := datastore.ParseDataStoreID(metadata.ResourceData.Id()) + if err != nil { + return err + } - return nil + if _, err := client.Delete(ctx, *id); err != nil { + return fmt.Errorf("deleting %s: %+v", *id, err) + } + + return nil + }, + } } + +// +//func resourceMachineLearningDataStoreBlobStorage() *pluginsdk.Resource { +// resource := &pluginsdk.Resource{ +// Create: resourceMachineLearningDataStoreBlobStorageCreate, +// Read: resourceMachineLearningDataStoreBlobStorageRead, +// Update: resourceMachineLearningDataStoreBlobStorageUpdate, +// Delete: resourceMachineLearningDataStoreBlobStorageDelete, +// +// Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { +// _, err := datastore.ParseDataStoreID(id) +// return err +// }), +// +// Timeouts: &pluginsdk.ResourceTimeout{ +// Create: pluginsdk.DefaultTimeout(30 * time.Minute), +// Read: pluginsdk.DefaultTimeout(5 * time.Minute), +// Update: pluginsdk.DefaultTimeout(30 * time.Minute), +// Delete: pluginsdk.DefaultTimeout(30 * time.Minute), +// }, +// +// Schema: map[string]*pluginsdk.Schema{ +// "name": { +// Type: pluginsdk.TypeString, +// Required: true, +// ForceNew: true, +// ValidateFunc: validate.DataStoreName, +// }, +// +// "workspace_id": { +// Type: pluginsdk.TypeString, +// Required: true, +// ForceNew: true, +// ValidateFunc: validate.WorkspaceID, +// }, +// +// "storage_container_id": { +// Type: pluginsdk.TypeString, +// Required: true, +// ForceNew: true, +// ValidateFunc: validation.StringIsNotEmpty, +// }, +// +// "description": { +// Type: pluginsdk.TypeString, +// Optional: true, +// ForceNew: true, +// }, +// +// "is_default": { +// Type: pluginsdk.TypeBool, +// Optional: true, +// Default: false, +// }, +// +// "service_data_auth_identity": { +// Type: pluginsdk.TypeString, +// Optional: true, +// ValidateFunc: validation.StringInSlice([]string{ +// string(datastore.ServiceDataAccessAuthIdentityNone), +// string(datastore.ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity), +// string(datastore.ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity), +// }, +// false), +// Default: string(datastore.ServiceDataAccessAuthIdentityNone), +// }, +// +// "account_key": { +// Type: pluginsdk.TypeString, +// Optional: true, +// Sensitive: true, +// ValidateFunc: validation.StringIsNotEmpty, +// ExactlyOneOf: []string{"account_key", "shared_access_signature"}, +// }, +// +// "shared_access_signature": { +// Type: pluginsdk.TypeString, +// Optional: true, +// Sensitive: true, +// ValidateFunc: validation.StringIsNotEmpty, +// AtLeastOneOf: []string{"account_key", "shared_access_signature"}, +// }, +// +// "tags": commonschema.TagsForceNew(), +// }, +// } +// return resource +//} +// +//func resourceMachineLearningDataStoreBlobStorageCreate(d *pluginsdk.ResourceData, meta interface{}) error { +// client := meta.(*clients.Client).MachineLearning.DatastoreClient +// subscriptionId := meta.(*clients.Client).Account.SubscriptionId +// ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) +// defer cancel() +// +// workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string)) +// if err != nil { +// return err +// } +// +// id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) +// if d.IsNewResource() { +// existing, err := client.Get(ctx, id) +// if err != nil { +// if !response.WasNotFound(existing.HttpResponse) { +// return fmt.Errorf("checking for presence of 
existing %s: %+v", id, err) +// } +// } +// if !response.WasNotFound(existing.HttpResponse) { +// return tf.ImportAsExistsError("azurerm_machine_learning_datastore_blobstorage", id.ID()) +// } +// } +// +// containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string)) +// if err != nil { +// return err +// } +// +// datastoreRaw := datastore.DatastoreResource{ +// Name: utils.String(d.Get("name").(string)), +// Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), +// } +// +// props := &datastore.AzureBlobDatastore{ +// AccountName: utils.String(containerId.StorageAccountName), +// ContainerName: utils.String(containerId.ContainerName), +// Description: utils.String(d.Get("description").(string)), +// ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), +// IsDefault: utils.Bool(d.Get("is_default").(bool)), +// Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), +// } +// +// accountKey := d.Get("account_key").(string) +// if accountKey != "" { +// props.Credentials = map[string]interface{}{ +// "credentialsType": string(datastore.CredentialsTypeAccountKey), +// "secrets": map[string]interface{}{ +// "secretsType": "AccountKey", +// "key": accountKey, +// }, +// } +// } +// +// sasToken := d.Get("shared_access_signature").(string) +// if sasToken != "" { +// props.Credentials = map[string]interface{}{ +// "credentialsType": string(datastore.CredentialsTypeSas), +// "secrets": map[string]interface{}{ +// "secretsType": "Sas", +// "sasToken": sasToken, +// }, +// } +// } +// datastoreRaw.Properties = props +// +// _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) +// if err != nil { +// return fmt.Errorf("creating/updating %s: %+v", id, err) +// } +// +// d.SetId(id.ID()) +// return resourceMachineLearningDataStoreBlobStorageRead(d, meta) +//} +// +//func resourceMachineLearningDataStoreBlobStorageUpdate(d *pluginsdk.ResourceData, meta interface{}) error { +// client := meta.(*clients.Client).MachineLearning.DatastoreClient +// subscriptionId := meta.(*clients.Client).Account.SubscriptionId +// ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) +// defer cancel() +// +// workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string)) +// if err != nil { +// return err +// } +// +// id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) +// +// containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string)) +// if err != nil { +// return err +// } +// +// datastoreRaw := datastore.DatastoreResource{ +// Name: utils.String(id.Name), +// Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), +// } +// +// props := &datastore.AzureBlobDatastore{ +// AccountName: utils.String(containerId.StorageAccountName), +// ContainerName: utils.String(containerId.ContainerName), +// Description: utils.String(d.Get("description").(string)), +// ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), +// IsDefault: utils.Bool(d.Get("is_default").(bool)), +// Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), +// } +// +// accountKey := d.Get("account_key").(string) +// if accountKey != "" { +// props.Credentials = map[string]interface{}{ +// 
"credentialsType": string(datastore.CredentialsTypeAccountKey), +// "secrets": map[string]interface{}{ +// "secretsType": "AccountKey", +// "key": accountKey, +// }, +// } +// } +// +// sasToken := d.Get("shared_access_signature").(string) +// if sasToken != "" { +// props.Credentials = map[string]interface{}{ +// "credentialsType": string(datastore.CredentialsTypeSas), +// "secrets": map[string]interface{}{ +// "secretsType": "Sas", +// "sasToken": sasToken, +// }, +// } +// } +// datastoreRaw.Properties = props +// +// _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) +// if err != nil { +// return fmt.Errorf("creating/updating %s: %+v", id, err) +// } +// +// d.SetId(id.ID()) +// return resourceMachineLearningDataStoreBlobStorageRead(d, meta) +//} +// +//func resourceMachineLearningDataStoreBlobStorageRead(d *pluginsdk.ResourceData, meta interface{}) error { +// client := meta.(*clients.Client).MachineLearning.DatastoreClient +// subscriptionId := meta.(*clients.Client).Account.SubscriptionId +// ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) +// defer cancel() +// +// id, err := datastore.ParseDataStoreID(d.Id()) +// if err != nil { +// return err +// } +// +// resp, err := client.Get(ctx, *id) +// if err != nil { +// if response.WasNotFound(resp.HttpResponse) { +// d.SetId("") +// return nil +// } +// return fmt.Errorf("reading %s: %+v", *id, err) +// } +// +// workspaceId := workspaces.NewWorkspaceID(subscriptionId, id.ResourceGroupName, id.WorkspaceName) +// d.Set("name", resp.Model.Name) +// d.Set("workspace_id", workspaceId.ID()) +// +// data := resp.Model.Properties.(datastore.AzureBlobDatastore) +// +// serviceDataAuth := "" +// if v := data.ServiceDataAccessAuthIdentity; v != nil { +// serviceDataAuth = string(*v) +// } +// d.Set("service_data_auth_identity", serviceDataAuth) +// +// containerId := storageparse.NewStorageContainerResourceManagerID(subscriptionId, workspaceId.ResourceGroupName, *data.AccountName, "default", *data.ContainerName) +// d.Set("storage_container_id", containerId.ID()) +// +// desc := "" +// if v := data.Description; v != nil { +// d.Set("description", desc) +// } +// +// d.Set("is_default", data.IsDefault) +// return flattenAndSetTags(d, *data.Tags) +//} +// +//func resourceMachineLearningDataStoreBlobStorageDelete(d *pluginsdk.ResourceData, meta interface{}) error { +// client := meta.(*clients.Client).MachineLearning.DatastoreClient +// ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) +// defer cancel() +// +// id, err := datastore.ParseDataStoreID(d.Id()) +// if err != nil { +// return err +// } +// +// if _, err := client.Delete(ctx, *id); err != nil { +// return fmt.Errorf("deleting %s: %+v", *id, err) +// } +// +// return nil +//} +// +//func expandTags(tagsMap map[string]interface{}) map[string]string { +// output := make(map[string]string, len(tagsMap)) +// +// for i, v := range tagsMap { +// // Validate should have ignored this error already +// value, _ := tags.TagValueToString(v) +// output[i] = value +// } +// +// return output +//} +// +//func flattenAndSetTags(d *pluginsdk.ResourceData, tagMap map[string]string) error { +// output := make(map[string]interface{}, len(tagMap)) +// for i, v := range tagMap { +// output[i] = v +// } +// +// if err := d.Set("tags", output); err != nil { +// return fmt.Errorf("setting `tags`: %s", err) +// } +// +// return nil +//} diff --git a/internal/services/machinelearning/registration.go 
b/internal/services/machinelearning/registration.go index 02fe4b10cb56..b393ce684419 100644 --- a/internal/services/machinelearning/registration.go +++ b/internal/services/machinelearning/registration.go @@ -7,6 +7,7 @@ import ( type Registration struct{} +var _ sdk.TypedServiceRegistration = Registration{} var _ sdk.UntypedServiceRegistrationWithAGitHubLabel = Registration{} func (r Registration) AssociatedGitHubLabel() string { @@ -34,11 +35,22 @@ func (r Registration) SupportedDataSources() map[string]*pluginsdk.Resource { // SupportedResources returns the supported Resources supported by this Service func (r Registration) SupportedResources() map[string]*pluginsdk.Resource { return map[string]*pluginsdk.Resource{ - "azurerm_machine_learning_compute_cluster": resourceComputeCluster(), - "azurerm_machine_learning_compute_instance": resourceComputeInstance(), - "azurerm_machine_learning_datastore_blobstorage": resourceMachineLearningDataStoreBlobStorage(), - "azurerm_machine_learning_inference_cluster": resourceAksInferenceCluster(), - "azurerm_machine_learning_synapse_spark": resourceSynapseSpark(), - "azurerm_machine_learning_workspace": resourceMachineLearningWorkspace(), + "azurerm_machine_learning_compute_cluster": resourceComputeCluster(), + "azurerm_machine_learning_compute_instance": resourceComputeInstance(), + "azurerm_machine_learning_inference_cluster": resourceAksInferenceCluster(), + "azurerm_machine_learning_synapse_spark": resourceSynapseSpark(), + "azurerm_machine_learning_workspace": resourceMachineLearningWorkspace(), + } +} + +// DataSources returns the typed DataSources supported by this service +func (r Registration) DataSources() []sdk.DataSource { + return []sdk.DataSource{} +} + +// Resources returns the typed Resources supported by this service +func (r Registration) Resources() []sdk.Resource { + return []sdk.Resource{ + MachineLearningDataStoreBlobStorage{}, } } From 76c24b50db50627f0572b1311a5d5bc08779bbaa Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Wed, 18 Jan 2023 12:56:38 +0800 Subject: [PATCH 19/22] delete unused code --- ...learning_datastore_blobstorage_resource.go | 313 ------------------ 1 file changed, 313 deletions(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index 427395c820d5..a0e65537a03a 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -351,316 +351,3 @@ func (r MachineLearningDataStoreBlobStorage) Delete() sdk.ResourceFunc { }, } } - -// -//func resourceMachineLearningDataStoreBlobStorage() *pluginsdk.Resource { -// resource := &pluginsdk.Resource{ -// Create: resourceMachineLearningDataStoreBlobStorageCreate, -// Read: resourceMachineLearningDataStoreBlobStorageRead, -// Update: resourceMachineLearningDataStoreBlobStorageUpdate, -// Delete: resourceMachineLearningDataStoreBlobStorageDelete, -// -// Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error { -// _, err := datastore.ParseDataStoreID(id) -// return err -// }), -// -// Timeouts: &pluginsdk.ResourceTimeout{ -// Create: pluginsdk.DefaultTimeout(30 * time.Minute), -// Read: pluginsdk.DefaultTimeout(5 * time.Minute), -// Update: pluginsdk.DefaultTimeout(30 * time.Minute), -// Delete: pluginsdk.DefaultTimeout(30 * time.Minute), -// }, -// -// Schema: map[string]*pluginsdk.Schema{ -// "name": 
{ -// Type: pluginsdk.TypeString, -// Required: true, -// ForceNew: true, -// ValidateFunc: validate.DataStoreName, -// }, -// -// "workspace_id": { -// Type: pluginsdk.TypeString, -// Required: true, -// ForceNew: true, -// ValidateFunc: validate.WorkspaceID, -// }, -// -// "storage_container_id": { -// Type: pluginsdk.TypeString, -// Required: true, -// ForceNew: true, -// ValidateFunc: validation.StringIsNotEmpty, -// }, -// -// "description": { -// Type: pluginsdk.TypeString, -// Optional: true, -// ForceNew: true, -// }, -// -// "is_default": { -// Type: pluginsdk.TypeBool, -// Optional: true, -// Default: false, -// }, -// -// "service_data_auth_identity": { -// Type: pluginsdk.TypeString, -// Optional: true, -// ValidateFunc: validation.StringInSlice([]string{ -// string(datastore.ServiceDataAccessAuthIdentityNone), -// string(datastore.ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity), -// string(datastore.ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity), -// }, -// false), -// Default: string(datastore.ServiceDataAccessAuthIdentityNone), -// }, -// -// "account_key": { -// Type: pluginsdk.TypeString, -// Optional: true, -// Sensitive: true, -// ValidateFunc: validation.StringIsNotEmpty, -// ExactlyOneOf: []string{"account_key", "shared_access_signature"}, -// }, -// -// "shared_access_signature": { -// Type: pluginsdk.TypeString, -// Optional: true, -// Sensitive: true, -// ValidateFunc: validation.StringIsNotEmpty, -// AtLeastOneOf: []string{"account_key", "shared_access_signature"}, -// }, -// -// "tags": commonschema.TagsForceNew(), -// }, -// } -// return resource -//} -// -//func resourceMachineLearningDataStoreBlobStorageCreate(d *pluginsdk.ResourceData, meta interface{}) error { -// client := meta.(*clients.Client).MachineLearning.DatastoreClient -// subscriptionId := meta.(*clients.Client).Account.SubscriptionId -// ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) -// defer cancel() -// -// workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string)) -// if err != nil { -// return err -// } -// -// id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) -// if d.IsNewResource() { -// existing, err := client.Get(ctx, id) -// if err != nil { -// if !response.WasNotFound(existing.HttpResponse) { -// return fmt.Errorf("checking for presence of existing %s: %+v", id, err) -// } -// } -// if !response.WasNotFound(existing.HttpResponse) { -// return tf.ImportAsExistsError("azurerm_machine_learning_datastore_blobstorage", id.ID()) -// } -// } -// -// containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string)) -// if err != nil { -// return err -// } -// -// datastoreRaw := datastore.DatastoreResource{ -// Name: utils.String(d.Get("name").(string)), -// Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), -// } -// -// props := &datastore.AzureBlobDatastore{ -// AccountName: utils.String(containerId.StorageAccountName), -// ContainerName: utils.String(containerId.ContainerName), -// Description: utils.String(d.Get("description").(string)), -// ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), -// IsDefault: utils.Bool(d.Get("is_default").(bool)), -// Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), -// } -// -// accountKey := d.Get("account_key").(string) -// if accountKey != "" { -// 
props.Credentials = map[string]interface{}{ -// "credentialsType": string(datastore.CredentialsTypeAccountKey), -// "secrets": map[string]interface{}{ -// "secretsType": "AccountKey", -// "key": accountKey, -// }, -// } -// } -// -// sasToken := d.Get("shared_access_signature").(string) -// if sasToken != "" { -// props.Credentials = map[string]interface{}{ -// "credentialsType": string(datastore.CredentialsTypeSas), -// "secrets": map[string]interface{}{ -// "secretsType": "Sas", -// "sasToken": sasToken, -// }, -// } -// } -// datastoreRaw.Properties = props -// -// _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) -// if err != nil { -// return fmt.Errorf("creating/updating %s: %+v", id, err) -// } -// -// d.SetId(id.ID()) -// return resourceMachineLearningDataStoreBlobStorageRead(d, meta) -//} -// -//func resourceMachineLearningDataStoreBlobStorageUpdate(d *pluginsdk.ResourceData, meta interface{}) error { -// client := meta.(*clients.Client).MachineLearning.DatastoreClient -// subscriptionId := meta.(*clients.Client).Account.SubscriptionId -// ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d) -// defer cancel() -// -// workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string)) -// if err != nil { -// return err -// } -// -// id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string)) -// -// containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string)) -// if err != nil { -// return err -// } -// -// datastoreRaw := datastore.DatastoreResource{ -// Name: utils.String(id.Name), -// Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), -// } -// -// props := &datastore.AzureBlobDatastore{ -// AccountName: utils.String(containerId.StorageAccountName), -// ContainerName: utils.String(containerId.ContainerName), -// Description: utils.String(d.Get("description").(string)), -// ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))), -// IsDefault: utils.Bool(d.Get("is_default").(bool)), -// Tags: utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))), -// } -// -// accountKey := d.Get("account_key").(string) -// if accountKey != "" { -// props.Credentials = map[string]interface{}{ -// "credentialsType": string(datastore.CredentialsTypeAccountKey), -// "secrets": map[string]interface{}{ -// "secretsType": "AccountKey", -// "key": accountKey, -// }, -// } -// } -// -// sasToken := d.Get("shared_access_signature").(string) -// if sasToken != "" { -// props.Credentials = map[string]interface{}{ -// "credentialsType": string(datastore.CredentialsTypeSas), -// "secrets": map[string]interface{}{ -// "secretsType": "Sas", -// "sasToken": sasToken, -// }, -// } -// } -// datastoreRaw.Properties = props -// -// _, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions()) -// if err != nil { -// return fmt.Errorf("creating/updating %s: %+v", id, err) -// } -// -// d.SetId(id.ID()) -// return resourceMachineLearningDataStoreBlobStorageRead(d, meta) -//} -// -//func resourceMachineLearningDataStoreBlobStorageRead(d *pluginsdk.ResourceData, meta interface{}) error { -// client := meta.(*clients.Client).MachineLearning.DatastoreClient -// subscriptionId := meta.(*clients.Client).Account.SubscriptionId -// ctx, cancel := 
timeouts.ForRead(meta.(*clients.Client).StopContext, d) -// defer cancel() -// -// id, err := datastore.ParseDataStoreID(d.Id()) -// if err != nil { -// return err -// } -// -// resp, err := client.Get(ctx, *id) -// if err != nil { -// if response.WasNotFound(resp.HttpResponse) { -// d.SetId("") -// return nil -// } -// return fmt.Errorf("reading %s: %+v", *id, err) -// } -// -// workspaceId := workspaces.NewWorkspaceID(subscriptionId, id.ResourceGroupName, id.WorkspaceName) -// d.Set("name", resp.Model.Name) -// d.Set("workspace_id", workspaceId.ID()) -// -// data := resp.Model.Properties.(datastore.AzureBlobDatastore) -// -// serviceDataAuth := "" -// if v := data.ServiceDataAccessAuthIdentity; v != nil { -// serviceDataAuth = string(*v) -// } -// d.Set("service_data_auth_identity", serviceDataAuth) -// -// containerId := storageparse.NewStorageContainerResourceManagerID(subscriptionId, workspaceId.ResourceGroupName, *data.AccountName, "default", *data.ContainerName) -// d.Set("storage_container_id", containerId.ID()) -// -// desc := "" -// if v := data.Description; v != nil { -// d.Set("description", desc) -// } -// -// d.Set("is_default", data.IsDefault) -// return flattenAndSetTags(d, *data.Tags) -//} -// -//func resourceMachineLearningDataStoreBlobStorageDelete(d *pluginsdk.ResourceData, meta interface{}) error { -// client := meta.(*clients.Client).MachineLearning.DatastoreClient -// ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) -// defer cancel() -// -// id, err := datastore.ParseDataStoreID(d.Id()) -// if err != nil { -// return err -// } -// -// if _, err := client.Delete(ctx, *id); err != nil { -// return fmt.Errorf("deleting %s: %+v", *id, err) -// } -// -// return nil -//} -// -//func expandTags(tagsMap map[string]interface{}) map[string]string { -// output := make(map[string]string, len(tagsMap)) -// -// for i, v := range tagsMap { -// // Validate should have ignored this error already -// value, _ := tags.TagValueToString(v) -// output[i] = value -// } -// -// return output -//} -// -//func flattenAndSetTags(d *pluginsdk.ResourceData, tagMap map[string]string) error { -// output := make(map[string]interface{}, len(tagMap)) -// for i, v := range tagMap { -// output[i] = v -// } -// -// if err := d.Set("tags", output); err != nil { -// return fmt.Errorf("setting `tags`: %s", err) -// } -// -// return nil -//} From 9732507144028cd2ae13f92e81955584228da895 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Wed, 18 Jan 2023 13:03:51 +0800 Subject: [PATCH 20/22] format --- .../machine_learning_datastore_blobstorage_resource.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index a0e65537a03a..f6220e9fadca 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -3,15 +3,15 @@ package machinelearning import ( "context" "fmt" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" "time" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore" 
"github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/workspaces" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" + "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" "github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate" storageparse "github.com/hashicorp/terraform-provider-azurerm/internal/services/storage/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" From 5065bfc25313687302091822059d61613ac5bb34 Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Tue, 31 Jan 2023 15:14:48 +0800 Subject: [PATCH 21/22] change is_default to compute --- ...e_learning_datastore_fileshare_resource.go | 21 ++++++++----------- ...learning_datastore_fileshare.html.markdown | 4 ++-- 2 files changed, 11 insertions(+), 14 deletions(-) diff --git a/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go index 5b9abe7ac439..fcf9586abdec 100644 --- a/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go @@ -33,10 +33,6 @@ type MachineLearningDataStoreFileShareModel struct { Tags map[string]string `tfschema:"tags"` } -func (r MachineLearningDataStoreFileShare) Attributes() map[string]*schema.Schema { - return nil -} - func (r MachineLearningDataStoreFileShare) ModelObject() interface{} { return &MachineLearningDataStoreFileShareModel{} } @@ -80,12 +76,6 @@ func (r MachineLearningDataStoreFileShare) Arguments() map[string]*pluginsdk.Sch ForceNew: true, }, - "is_default": { - Type: pluginsdk.TypeBool, - Optional: true, - Default: false, - }, - "service_data_identity": { Type: pluginsdk.TypeString, Optional: true, @@ -118,6 +108,15 @@ func (r MachineLearningDataStoreFileShare) Arguments() map[string]*pluginsdk.Sch } } +func (r MachineLearningDataStoreFileShare) Attributes() map[string]*schema.Schema { + return map[string]*pluginsdk.Schema{ + "is_default": { + Type: pluginsdk.TypeBool, + Computed: true, + }, + } +} + func (r MachineLearningDataStoreFileShare) Create() sdk.ResourceFunc { return sdk.ResourceFunc{ Timeout: 30 * time.Minute, @@ -162,7 +161,6 @@ func (r MachineLearningDataStoreFileShare) Create() sdk.ResourceFunc { FileShareName: fileShareId.FileshareName, Description: utils.String(model.Description), ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(model.ServiceDataIdentity)), - IsDefault: utils.Bool(model.IsDefault), Tags: utils.ToPtr(model.Tags), } @@ -231,7 +229,6 @@ func (r MachineLearningDataStoreFileShare) Update() sdk.ResourceFunc { FileShareName: fileShareId.FileshareName, Description: utils.String(state.Description), ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(state.ServiceDataIdentity)), - IsDefault: utils.Bool(state.IsDefault), Tags: utils.ToPtr(state.Tags), } diff --git a/website/docs/r/machine_learning_datastore_fileshare.html.markdown b/website/docs/r/machine_learning_datastore_fileshare.html.markdown index d401f80f458c..71aa7f22570b 100644 --- a/website/docs/r/machine_learning_datastore_fileshare.html.markdown +++ b/website/docs/r/machine_learning_datastore_fileshare.html.markdown @@ -91,8 +91,6 @@ The following arguments are supported: * `description` - (Optional) Text used to describe the asset. 
Changing this forces a new Machine Learning DataStore to be created. -* `is_default` - (Optional) Specifies whether this Machines Learning DataStore is the default for the Workspace. Defaults to `false`. - ~> **Note:** `is_default` can only be set to `true` on update. * `service_data_identity` - (Optional) Specifies which identity to use when retrieving data from the specified source. Defaults to `None`. Possible values are `None`, `WorkspaceSystemAssignedIdentity` and `WorkspaceUserAssignedIdentity`. @@ -105,6 +103,8 @@ In addition to the Arguments listed above - the following Attributes are exporte * `id` - The ID of the Machine Learning DataStore. +* `is_default` - Indicate whether this Machines Learning DataStore is the default for the Workspace. + ## Timeouts The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/language/resources/syntax#operation-timeouts) for certain actions: From 4b308f4279504075f93682436edb5cb9238a178d Mon Sep 17 00:00:00 2001 From: xuzhang3 Date: Tue, 31 Jan 2023 15:15:54 +0800 Subject: [PATCH 22/22] update doc --- .../docs/r/machine_learning_datastore_fileshare.html.markdown | 2 -- 1 file changed, 2 deletions(-) diff --git a/website/docs/r/machine_learning_datastore_fileshare.html.markdown b/website/docs/r/machine_learning_datastore_fileshare.html.markdown index 71aa7f22570b..25e0fd8b112e 100644 --- a/website/docs/r/machine_learning_datastore_fileshare.html.markdown +++ b/website/docs/r/machine_learning_datastore_fileshare.html.markdown @@ -91,8 +91,6 @@ The following arguments are supported: * `description` - (Optional) Text used to describe the asset. Changing this forces a new Machine Learning DataStore to be created. -~> **Note:** `is_default` can only be set to `true` on update. - * `service_data_identity` - (Optional) Specifies which identity to use when retrieving data from the specified source. Defaults to `None`. Possible values are `None`, `WorkspaceSystemAssignedIdentity` and `WorkspaceUserAssignedIdentity`. * `tags` - (Optional) A mapping of tags which should be assigned to the Machine Learning DataStore. Changing this forces a new Machine Learning DataStore to be created.
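
Taken together, the last two patches make `is_default` a computed attribute on the file-share datastore and drop the related note from its documentation. Below is a minimal sketch of the resulting usage, in which `is_default` is only read back, never set; the `azurerm_machine_learning_datastore_fileshare` resource type name and the `resource_manager_id` reference on `azurerm_storage_share` are assumptions based on the documentation and schema in this series rather than an example copied from it.

```hcl
# Minimal sketch: file-share datastore after is_default became a computed attribute.
# Assumes the example workspace, storage account and file share already exist;
# azurerm_storage_share.example.resource_manager_id is an assumed reference here.
resource "azurerm_machine_learning_datastore_fileshare" "example" {
  name                 = "example-datastore"
  workspace_id         = azurerm_machine_learning_workspace.example.id
  storage_fileshare_id = azurerm_storage_share.example.resource_manager_id
  account_key          = azurerm_storage_account.example.primary_access_key
}

# is_default can only be observed, for example via an output.
output "datastore_is_default" {
  value = azurerm_machine_learning_datastore_fileshare.example.is_default
}
```
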