Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[New resource] - azurerm_machine_learning_datastore_datalake_gen2 #20045

Merged
merged 36 commits into from
Feb 6, 2023
Merged
Show file tree
Hide file tree
Changes from 16 commits
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
756c9ad
machine learning data store
Jan 9, 2023
0513273
vendor files
Jan 9, 2023
8baf93f
update doc
Jan 9, 2023
29e4044
optimize expandBlobStorage
Jan 9, 2023
6cd24d6
update test
Jan 9, 2023
1d5f284
update error message
Jan 9, 2023
065283e
update test case
Jan 9, 2023
9842272
machine learning data store file share
Jan 10, 2023
ecd57bc
machine learning data store blob storage
Jan 11, 2023
18ac5d1
add nil check and format
Jan 11, 2023
c7a3ed1
update doc
Jan 11, 2023
f3f577f
update doc
Jan 11, 2023
4bd02ba
split create/update method
Jan 12, 2023
9085d92
replace storage account name with resource id
Jan 12, 2023
23031a1
rename
Jan 12, 2023
ced5ddc
machine learning data store file share
Jan 12, 2023
c3f5b21
machine learning data store data lake gen1
Jan 16, 2023
e8b9170
mark secret as sensitive
Jan 16, 2023
25eb975
update doc
Jan 16, 2023
e694017
update doc
Jan 16, 2023
a88e249
machine learning datastore data lake gen2
Jan 17, 2023
356b096
format
Jan 17, 2023
8b922fc
update doc
Jan 17, 2023
3ddcc4b
refactor to typed resource
Jan 18, 2023
9eed7f1
Merge remote-tracking branch 'origin/main'
Jan 18, 2023
76c24b5
delete unused code
Jan 18, 2023
9732507
format
Jan 18, 2023
4026e2c
refactor to typed resource
Jan 18, 2023
3b9b24d
Merge branch 'f/datastore_azureblob' into f/datastore_gen1
Jan 18, 2023
1e17fa7
Merge branch 'f/datestore_fileshare' into f/datastore_gen1
Jan 18, 2023
b537780
refactor to typed resource
Jan 18, 2023
5e1662a
refactor to typed resource
Jan 18, 2023
88dc4e5
remove data lake gen1
Jan 30, 2023
f25877f
rename service_data_identity to service_data_auth_identity
Jan 30, 2023
5aa06b0
change is_default to computed
Jan 31, 2023
4ba004a
Merge branch 'main' into f/datastore_datalakegen2
katbyte Feb 6, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions internal/services/machinelearning/client/client.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package client

import (
"github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore"
"github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/machinelearningcomputes"
"github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/workspaces"
"github.com/hashicorp/terraform-provider-azurerm/internal/common"
Expand All @@ -9,6 +10,7 @@ import (
// Client bundles the Azure SDK clients used by the machinelearning service package.
type Client struct {
	ComputeClient    *machinelearningcomputes.MachineLearningComputesClient
	WorkspacesClient *workspaces.WorkspacesClient
	// DatastoreClient manages Machine Learning workspace datastores (blob storage, file share, data lake).
	DatastoreClient *datastore.DatastoreClient
}

func NewClient(o *common.ClientOptions) *Client {
Expand All @@ -18,8 +20,12 @@ func NewClient(o *common.ClientOptions) *Client {
WorkspacesClient := workspaces.NewWorkspacesClientWithBaseURI(o.ResourceManagerEndpoint)
o.ConfigureClient(&WorkspacesClient.Client, o.ResourceManagerAuthorizer)

DatastoreClient := datastore.NewDatastoreClientWithBaseURI(o.ResourceManagerEndpoint)
o.ConfigureClient(&DatastoreClient.Client, o.ResourceManagerAuthorizer)

return &Client{
ComputeClient: &ComputeClient,
WorkspacesClient: &WorkspacesClient,
DatastoreClient: &DatastoreClient,
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,332 @@
package machinelearning

import (
"fmt"
"time"

"github.com/hashicorp/go-azure-helpers/lang/response"
"github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema"
"github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/datastore"
"github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2022-05-01/workspaces"
"github.com/hashicorp/terraform-provider-azurerm/helpers/tf"
"github.com/hashicorp/terraform-provider-azurerm/internal/clients"
"github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate"
storageparse "github.com/hashicorp/terraform-provider-azurerm/internal/services/storage/parse"
"github.com/hashicorp/terraform-provider-azurerm/internal/tags"
"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
"github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation"
"github.com/hashicorp/terraform-provider-azurerm/internal/timeouts"
"github.com/hashicorp/terraform-provider-azurerm/utils"
)

// resourceMachineLearningDataStoreBlobStorage returns the schema and CRUD wiring for the
// azurerm_machine_learning_datastore_blobstorage resource, which registers an Azure Storage
// blob container as a datastore inside a Machine Learning workspace.
func resourceMachineLearningDataStoreBlobStorage() *pluginsdk.Resource {
	resource := &pluginsdk.Resource{
		Create: resourceMachineLearningDataStoreBlobStorageCreate,
		Read:   resourceMachineLearningDataStoreBlobStorageRead,
		Update: resourceMachineLearningDataStoreBlobStorageUpdate,
		Delete: resourceMachineLearningDataStoreBlobStorageDelete,

		Importer: pluginsdk.ImporterValidatingResourceId(func(id string) error {
			_, err := datastore.ParseDataStoreID(id)
			return err
		}),

		Timeouts: &pluginsdk.ResourceTimeout{
			Create: pluginsdk.DefaultTimeout(30 * time.Minute),
			Read:   pluginsdk.DefaultTimeout(5 * time.Minute),
			Update: pluginsdk.DefaultTimeout(30 * time.Minute),
			Delete: pluginsdk.DefaultTimeout(30 * time.Minute),
		},

		Schema: map[string]*pluginsdk.Schema{
			"name": {
				Type:         pluginsdk.TypeString,
				Required:     true,
				ForceNew:     true,
				ValidateFunc: validate.DataStoreName,
			},

			"workspace_id": {
				Type:         pluginsdk.TypeString,
				Required:     true,
				ForceNew:     true,
				ValidateFunc: validate.WorkspaceID,
			},

			"storage_container_id": {
				Type:         pluginsdk.TypeString,
				Required:     true,
				ForceNew:     true,
				ValidateFunc: validation.StringIsNotEmpty,
			},

			"description": {
				Type:     pluginsdk.TypeString,
				Optional: true,
				ForceNew: true,
			},

			"is_default": {
				Type:     pluginsdk.TypeBool,
				Optional: true,
				Default:  false,
			},

			"service_data_auth_identity": {
				Type: pluginsdk.TypeString,
				Optional: true,
				ValidateFunc: validation.StringInSlice([]string{
					string(datastore.ServiceDataAccessAuthIdentityNone),
					string(datastore.ServiceDataAccessAuthIdentityWorkspaceSystemAssignedIdentity),
					string(datastore.ServiceDataAccessAuthIdentityWorkspaceUserAssignedIdentity),
				},
					false),
				Default: string(datastore.ServiceDataAccessAuthIdentityNone),
			},

			// Exactly one of `account_key` / `shared_access_signature` must be supplied:
			// both fields declare ExactlyOneOf so the constraint is symmetric and
			// self-describing (previously the second field used AtLeastOneOf, which was
			// redundant given the first field's ExactlyOneOf but read inconsistently).
			"account_key": {
				Type:         pluginsdk.TypeString,
				Optional:     true,
				Sensitive:    true,
				ValidateFunc: validation.StringIsNotEmpty,
				ExactlyOneOf: []string{"account_key", "shared_access_signature"},
			},

			"shared_access_signature": {
				Type:         pluginsdk.TypeString,
				Optional:     true,
				Sensitive:    true,
				ValidateFunc: validation.StringIsNotEmpty,
				ExactlyOneOf: []string{"account_key", "shared_access_signature"},
			},

			"tags": commonschema.TagsForceNew(),
		},
	}
	return resource
}

// resourceMachineLearningDataStoreBlobStorageCreate creates a new blob-storage datastore
// in the workspace referenced by `workspace_id`, failing with an import-as-exists error if
// a datastore with the same name already exists.
func resourceMachineLearningDataStoreBlobStorageCreate(d *pluginsdk.ResourceData, meta interface{}) error {
	client := meta.(*clients.Client).MachineLearning.DatastoreClient
	subscriptionId := meta.(*clients.Client).Account.SubscriptionId
	ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d)
	defer cancel()

	workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string))
	if err != nil {
		return err
	}

	id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string))
	if d.IsNewResource() {
		existing, err := client.Get(ctx, id)
		if err != nil {
			if !response.WasNotFound(existing.HttpResponse) {
				return fmt.Errorf("checking for presence of existing %s: %+v", id, err)
			}
		}
		if !response.WasNotFound(existing.HttpResponse) {
			return tf.ImportAsExistsError("azurerm_machine_learning_datastore_blobstorage", id.ID())
		}
	}

	containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string))
	if err != nil {
		return err
	}

	datastoreRaw := datastore.DatastoreResource{
		Name: utils.String(d.Get("name").(string)),
		Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)),
	}

	props := &datastore.AzureBlobDatastore{
		AccountName:                   utils.String(containerId.StorageAccountName),
		ContainerName:                 utils.String(containerId.ContainerName),
		Description:                   utils.String(d.Get("description").(string)),
		ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))),
		IsDefault:                     utils.Bool(d.Get("is_default").(bool)),
		Tags:                          utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))),
	}

	// The schema guarantees exactly one of account_key / shared_access_signature is set,
	// so at most one of the branches below populates Credentials.
	accountKey := d.Get("account_key").(string)
	if accountKey != "" {
		props.Credentials = map[string]interface{}{
			"credentialsType": string(datastore.CredentialsTypeAccountKey),
			"secrets": map[string]interface{}{
				"secretsType": "AccountKey",
				"key":         accountKey,
			},
		}
	}

	sasToken := d.Get("shared_access_signature").(string)
	if sasToken != "" {
		props.Credentials = map[string]interface{}{
			"credentialsType": string(datastore.CredentialsTypeSas),
			"secrets": map[string]interface{}{
				"secretsType": "Sas",
				"sasToken":    sasToken,
			},
		}
	}
	datastoreRaw.Properties = props

	_, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions())
	if err != nil {
		// This is the create path: the error message should not claim "updating".
		return fmt.Errorf("creating %s: %+v", id, err)
	}

	d.SetId(id.ID())
	return resourceMachineLearningDataStoreBlobStorageRead(d, meta)
}

// resourceMachineLearningDataStoreBlobStorageUpdate pushes the full desired state of an
// existing blob-storage datastore via CreateOrUpdate (the API has no partial-update call).
func resourceMachineLearningDataStoreBlobStorageUpdate(d *pluginsdk.ResourceData, meta interface{}) error {
	client := meta.(*clients.Client).MachineLearning.DatastoreClient
	subscriptionId := meta.(*clients.Client).Account.SubscriptionId
	// Bug fix: the update path previously used timeouts.ForCreate, ignoring any
	// user-configured Update timeout.
	ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d)
	defer cancel()

	workspaceId, err := workspaces.ParseWorkspaceID(d.Get("workspace_id").(string))
	if err != nil {
		return err
	}

	id := datastore.NewDataStoreID(subscriptionId, workspaceId.ResourceGroupName, workspaceId.WorkspaceName, d.Get("name").(string))

	containerId, err := storageparse.StorageContainerResourceManagerID(d.Get("storage_container_id").(string))
	if err != nil {
		return err
	}

	datastoreRaw := datastore.DatastoreResource{
		Name: utils.String(id.Name),
		Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)),
	}

	props := &datastore.AzureBlobDatastore{
		AccountName:                   utils.String(containerId.StorageAccountName),
		ContainerName:                 utils.String(containerId.ContainerName),
		Description:                   utils.String(d.Get("description").(string)),
		ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(d.Get("service_data_auth_identity").(string))),
		IsDefault:                     utils.Bool(d.Get("is_default").(bool)),
		Tags:                          utils.ToPtr(expandTags(d.Get("tags").(map[string]interface{}))),
	}

	// The schema guarantees exactly one of account_key / shared_access_signature is set.
	accountKey := d.Get("account_key").(string)
	if accountKey != "" {
		props.Credentials = map[string]interface{}{
			"credentialsType": string(datastore.CredentialsTypeAccountKey),
			"secrets": map[string]interface{}{
				"secretsType": "AccountKey",
				"key":         accountKey,
			},
		}
	}

	sasToken := d.Get("shared_access_signature").(string)
	if sasToken != "" {
		props.Credentials = map[string]interface{}{
			"credentialsType": string(datastore.CredentialsTypeSas),
			"secrets": map[string]interface{}{
				"secretsType": "Sas",
				"sasToken":    sasToken,
			},
		}
	}
	datastoreRaw.Properties = props

	_, err = client.CreateOrUpdate(ctx, id, datastoreRaw, datastore.DefaultCreateOrUpdateOperationOptions())
	if err != nil {
		// This is the update path: the error message should not claim "creating".
		return fmt.Errorf("updating %s: %+v", id, err)
	}

	// NOTE: the ID is already set on an existing resource; re-setting it here (as the
	// original did) is unnecessary, so it has been dropped.
	return resourceMachineLearningDataStoreBlobStorageRead(d, meta)
}

// resourceMachineLearningDataStoreBlobStorageRead refreshes state from the API, removing
// the resource from state when the datastore no longer exists.
func resourceMachineLearningDataStoreBlobStorageRead(d *pluginsdk.ResourceData, meta interface{}) error {
	client := meta.(*clients.Client).MachineLearning.DatastoreClient
	subscriptionId := meta.(*clients.Client).Account.SubscriptionId
	ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
	defer cancel()

	id, err := datastore.ParseDataStoreID(d.Id())
	if err != nil {
		return err
	}

	resp, err := client.Get(ctx, *id)
	if err != nil {
		if response.WasNotFound(resp.HttpResponse) {
			d.SetId("")
			return nil
		}
		return fmt.Errorf("reading %s: %+v", *id, err)
	}

	// Guard against a nil model before dereferencing it (the original would panic).
	if resp.Model == nil {
		return fmt.Errorf("reading %s: model was nil", *id)
	}

	workspaceId := workspaces.NewWorkspaceID(subscriptionId, id.ResourceGroupName, id.WorkspaceName)
	d.Set("name", resp.Model.Name)
	d.Set("workspace_id", workspaceId.ID())

	// Checked type assertion: an unexpected datastore type now yields an error instead
	// of a panic.
	data, ok := resp.Model.Properties.(datastore.AzureBlobDatastore)
	if !ok {
		return fmt.Errorf("reading %s: expected an `AzureBlob` datastore but got %T", *id, resp.Model.Properties)
	}

	serviceDataAuth := ""
	if v := data.ServiceDataAccessAuthIdentity; v != nil {
		serviceDataAuth = string(*v)
	}
	d.Set("service_data_auth_identity", serviceDataAuth)

	// NOTE(review): this assumes the storage account lives in the workspace's resource
	// group and uses the "default" blob service — the API only returns the account and
	// container names. TODO confirm this holds for accounts in other resource groups.
	containerId := storageparse.NewStorageContainerResourceManagerID(subscriptionId, workspaceId.ResourceGroupName, *data.AccountName, "default", *data.ContainerName)
	d.Set("storage_container_id", containerId.ID())

	// Bug fix: the original assigned `desc` but never copied *v into it, so
	// `description` was always set to "" (and only when the API returned a value).
	desc := ""
	if v := data.Description; v != nil {
		desc = *v
	}
	d.Set("description", desc)

	d.Set("is_default", data.IsDefault)

	// Guard against nil Tags before dereferencing (the original would panic).
	tagsMap := map[string]string{}
	if data.Tags != nil {
		tagsMap = *data.Tags
	}
	return flattenAndSetTags(d, tagsMap)
}

// resourceMachineLearningDataStoreBlobStorageDelete removes the datastore identified by
// the resource's state ID from its Machine Learning workspace.
func resourceMachineLearningDataStoreBlobStorageDelete(d *pluginsdk.ResourceData, meta interface{}) error {
	client := meta.(*clients.Client).MachineLearning.DatastoreClient
	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
	defer cancel()

	dataStoreId, err := datastore.ParseDataStoreID(d.Id())
	if err != nil {
		return err
	}

	_, err = client.Delete(ctx, *dataStoreId)
	if err != nil {
		return fmt.Errorf("deleting %s: %+v", *dataStoreId, err)
	}

	return nil
}

// expandTags converts the raw Terraform tags map into the map[string]string shape the
// Azure SDK expects.
func expandTags(tagsMap map[string]interface{}) map[string]string {
	result := make(map[string]string, len(tagsMap))

	for key, raw := range tagsMap {
		// Schema validation has already vetted each value, so the error is ignored here.
		stringValue, _ := tags.TagValueToString(raw)
		result[key] = stringValue
	}

	return result
}

// flattenAndSetTags writes the API's string-valued tag map into the resource's `tags`
// attribute, converting it to the map[string]interface{} shape the plugin SDK expects.
func flattenAndSetTags(d *pluginsdk.ResourceData, tagMap map[string]string) error {
	flattened := make(map[string]interface{}, len(tagMap))
	for key, value := range tagMap {
		flattened[key] = value
	}

	err := d.Set("tags", flattened)
	if err != nil {
		return fmt.Errorf("setting `tags`: %s", err)
	}

	return nil
}
Loading