Skip to content

Commit

Permalink
azurerm_machine_learning_workspace - support for the `feature_store…
Browse files Browse the repository at this point in the history
…` property (#24716)

* machine learning workspace feature store

* format
  • Loading branch information
xuzhang3 authored Feb 8, 2024
1 parent 0ea6b55 commit f48d0e4
Show file tree
Hide file tree
Showing 3 changed files with 252 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ package machinelearning

import (
"fmt"
"strings"
"time"

"github.com/hashicorp/go-azure-helpers/lang/pointer"
Expand Down Expand Up @@ -94,6 +95,39 @@ func resourceMachineLearningWorkspace() *pluginsdk.Resource {

"identity": commonschema.SystemAssignedUserAssignedIdentityRequired(),

"kind": {
Type: pluginsdk.TypeString,
Optional: true,
ValidateFunc: validation.StringInSlice([]string{
"Default",
"FeatureStore",
}, false),
Default: "Default",
},

"feature_store": {
Type: pluginsdk.TypeList,
Optional: true,
MaxItems: 1,
Elem: &pluginsdk.Resource{
Schema: map[string]*pluginsdk.Schema{
"computer_spark_runtime_version": {
Type: pluginsdk.TypeString,
Optional: true,
},
"offline_connection_name": {
Type: pluginsdk.TypeString,
Optional: true,
},

"online_connection_name": {
Type: pluginsdk.TypeString,
Optional: true,
},
},
},
},

"primary_user_assigned_identity": {
Type: pluginsdk.TypeString,
Optional: true,
Expand Down Expand Up @@ -250,6 +284,7 @@ func resourceMachineLearningWorkspaceCreateOrUpdate(d *pluginsdk.ResourceData, m
Name: d.Get("sku_name").(string),
Tier: pointer.To(workspaces.SkuTier(d.Get("sku_name").(string))),
},
Kind: utils.String(d.Get("kind").(string)),

Identity: expandedIdentity,
Properties: &workspaces.WorkspaceProperties{
Expand Down Expand Up @@ -290,6 +325,18 @@ func resourceMachineLearningWorkspaceCreateOrUpdate(d *pluginsdk.ResourceData, m
workspace.Properties.PrimaryUserAssignedIdentity = pointer.To(v.(string))
}

featureStore := expandMachineLearningWorkspaceFeatureStore(d.Get("feature_store").([]interface{}))
if strings.EqualFold(*workspace.Kind, "Default") {
if featureStore != nil {
return fmt.Errorf("`feature_store` can only be set when `kind` is `FeatureStore`")
}
} else {
if featureStore == nil {
return fmt.Errorf("`feature_store` can not be empty when `kind` is `FeatureStore`")
}
workspace.Properties.FeatureStoreSettings = featureStore
}

future, err := client.CreateOrUpdate(ctx, id, workspace)
if err != nil {
return fmt.Errorf("creating/updating %s: %+v", id, err)
Expand Down Expand Up @@ -333,6 +380,8 @@ func resourceMachineLearningWorkspaceRead(d *pluginsdk.ResourceData, meta interf
d.Set("sku_name", sku.Name)
}

d.Set("kind", resp.Model.Kind)

if props := resp.Model.Properties; props != nil {
d.Set("application_insights_id", props.ApplicationInsights)
d.Set("storage_account_id", props.StorageAccount)
Expand Down Expand Up @@ -367,6 +416,11 @@ func resourceMachineLearningWorkspaceRead(d *pluginsdk.ResourceData, meta interf
return fmt.Errorf("setting `identity`: %+v", err)
}

featureStoreSettings := flattenMachineLearningWorkspaceFeatureStore(resp.Model.Properties.FeatureStoreSettings)
if err := d.Set("feature_store", featureStoreSettings); err != nil {
return fmt.Errorf("setting `feature_store`: %+v", err)
}

flattenedEncryption, err := flattenMachineLearningWorkspaceEncryption(resp.Model.Properties.Encryption)
if err != nil {
return fmt.Errorf("flattening `encryption`: %+v", err)
Expand Down Expand Up @@ -511,3 +565,56 @@ func flattenMachineLearningWorkspaceEncryption(input *workspaces.EncryptionPrope
},
}, nil
}

// expandMachineLearningWorkspaceFeatureStore maps the Terraform `feature_store`
// list block onto the SDK's FeatureStoreSettings model. A nil result means the
// block was absent from the configuration.
func expandMachineLearningWorkspaceFeatureStore(input []interface{}) *workspaces.FeatureStoreSettings {
	if len(input) == 0 || input[0] == nil {
		return nil
	}

	v := input[0].(map[string]interface{})
	settings := workspaces.FeatureStoreSettings{}

	// Only populate sub-fields that were actually set; empty strings are
	// treated as "unset" so they are not sent to the API.
	if spark := v["computer_spark_runtime_version"].(string); spark != "" {
		settings.ComputeRuntime = &workspaces.ComputeRuntimeDto{
			SparkRuntimeVersion: utils.String(spark),
		}
	}

	if offline := v["offline_connection_name"].(string); offline != "" {
		settings.OfflineStoreConnectionName = utils.String(offline)
	}

	if online := v["online_connection_name"].(string); online != "" {
		settings.OnlineStoreConnectionName = utils.String(online)
	}

	return &settings
}

// flattenMachineLearningWorkspaceFeatureStore converts the SDK's
// FeatureStoreSettings back into the shape of the Terraform `feature_store`
// block. A nil input flattens to an empty list so the block is cleared in state.
func flattenMachineLearningWorkspaceFeatureStore(input *workspaces.FeatureStoreSettings) *[]interface{} {
	if input == nil {
		return &[]interface{}{}
	}

	var sparkRuntimeVersion, offlineConnection, onlineConnection string

	if rt := input.ComputeRuntime; rt != nil && rt.SparkRuntimeVersion != nil {
		sparkRuntimeVersion = *rt.SparkRuntimeVersion
	}

	if v := input.OfflineStoreConnectionName; v != nil {
		offlineConnection = *v
	}

	if v := input.OnlineStoreConnectionName; v != nil {
		onlineConnection = *v
	}

	return &[]interface{}{
		map[string]interface{}{
			"computer_spark_runtime_version": sparkRuntimeVersion,
			"offline_connection_name":        offlineConnection,
			"online_connection_name":         onlineConnection,
		},
	}
}
Original file line number Diff line number Diff line change
Expand Up @@ -262,6 +262,78 @@ func TestAccMachineLearningWorkspace_systemAssignedAndCustomManagedKey(t *testin
})
}

// TestAccMachineLearningWorkspace_featureStore provisions a workspace with
// kind = "FeatureStore" plus a feature_store block, then verifies import.
func TestAccMachineLearningWorkspace_featureStore(t *testing.T) {
	testData := acceptance.BuildTestData(t, "azurerm_machine_learning_workspace", "test")
	resource := WorkspaceResource{}

	steps := []acceptance.TestStep{
		{
			Config: resource.featureStore(testData),
			Check: acceptance.ComposeTestCheckFunc(
				check.That(testData.ResourceName).ExistsInAzure(resource),
				check.That(testData.ResourceName).Key("kind").Exists(),
			),
		},
		testData.ImportStep(),
	}

	testData.ResourceTest(t, resource, steps)
}

// TestAccMachineLearningWorkspace_featureStoreUpdate applies a feature-store
// workspace, then updates the feature_store block in place, importing after
// each apply.
func TestAccMachineLearningWorkspace_featureStoreUpdate(t *testing.T) {
	testData := acceptance.BuildTestData(t, "azurerm_machine_learning_workspace", "test")
	resource := WorkspaceResource{}

	// applyStep builds one apply step for the given configuration, asserting
	// the workspace exists in Azure and `kind` is tracked in state.
	applyStep := func(config string) acceptance.TestStep {
		return acceptance.TestStep{
			Config: config,
			Check: acceptance.ComposeTestCheckFunc(
				check.That(testData.ResourceName).ExistsInAzure(resource),
				check.That(testData.ResourceName).Key("kind").Exists(),
			),
		}
	}

	testData.ResourceTest(t, resource, []acceptance.TestStep{
		applyStep(resource.featureStore(testData)),
		testData.ImportStep(),
		applyStep(resource.featureStoreUpdate(testData)),
		testData.ImportStep(),
	})
}

// TestAccMachineLearningWorkspace_kindUpdate round-trips the workspace `kind`
// between FeatureStore and the default configuration, importing after each
// apply, to verify the kind can be changed in both directions.
func TestAccMachineLearningWorkspace_kindUpdate(t *testing.T) {
	testData := acceptance.BuildTestData(t, "azurerm_machine_learning_workspace", "test")
	resource := WorkspaceResource{}

	// applyStep builds one apply step for the given configuration, asserting
	// the workspace exists in Azure and `kind` is tracked in state.
	applyStep := func(config string) acceptance.TestStep {
		return acceptance.TestStep{
			Config: config,
			Check: acceptance.ComposeTestCheckFunc(
				check.That(testData.ResourceName).ExistsInAzure(resource),
				check.That(testData.ResourceName).Key("kind").Exists(),
			),
		}
	}

	testData.ResourceTest(t, resource, []acceptance.TestStep{
		applyStep(resource.featureStore(testData)),
		testData.ImportStep(),
		applyStep(resource.basic(testData)),
		testData.ImportStep(),
		applyStep(resource.featureStore(testData)),
		testData.ImportStep(),
	})
}

func (r WorkspaceResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
workspacesClient := client.MachineLearning.Workspaces
id, err := workspaces.ParseWorkspaceID(state.ID)
Expand Down Expand Up @@ -851,3 +923,59 @@ resource "azurerm_machine_learning_workspace" "test" {
}
`, r.template(data), data.RandomInteger)
}

// featureStore returns an HCL configuration for a workspace of kind
// FeatureStore with a fully populated feature_store block.
func (r WorkspaceResource) featureStore(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_machine_learning_workspace" "test" {
  name                    = "acctest-MLW-%d"
  location                = azurerm_resource_group.test.location
  resource_group_name     = azurerm_resource_group.test.name
  application_insights_id = azurerm_application_insights.test.id
  key_vault_id            = azurerm_key_vault.test.id
  storage_account_id      = azurerm_storage_account.test.id
  kind                    = "FeatureStore"
  feature_store {
    computer_spark_runtime_version = "3.1"
    offline_connection_name        = "offlineStoreConnectionName"
    online_connection_name         = "onlineStoreConnectionName"
  }
  identity {
    type = "SystemAssigned"
  }
}
`, r.template(data), data.RandomInteger)
}

// featureStoreUpdate returns an HCL configuration that updates every
// feature_store attribute of the workspace created by featureStore.
func (r WorkspaceResource) featureStoreUpdate(data acceptance.TestData) string {
	return fmt.Sprintf(`
%s
resource "azurerm_machine_learning_workspace" "test" {
  name                    = "acctest-MLW-%d"
  location                = azurerm_resource_group.test.location
  resource_group_name     = azurerm_resource_group.test.name
  application_insights_id = azurerm_application_insights.test.id
  key_vault_id            = azurerm_key_vault.test.id
  storage_account_id      = azurerm_storage_account.test.id
  kind                    = "FeatureStore"
  feature_store {
    computer_spark_runtime_version = "3.5"
    offline_connection_name        = "offlineStoreConnectionNameUpdate"
    online_connection_name         = "onlineStoreConnectionNameUpdate"
  }
  identity {
    type = "SystemAssigned"
  }
}
`, r.template(data), data.RandomInteger)
}
17 changes: 17 additions & 0 deletions website/docs/r/machine_learning_workspace.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -358,6 +358,8 @@ The following arguments are supported:

* `identity` - (Required) An `identity` block as defined below.

* `kind` - (Optional) The type of the Workspace. Possible values are `Default` and `FeatureStore`. Defaults to `Default`.

* `container_registry_id` - (Optional) The ID of the container registry associated with this Machine Learning Workspace. Changing this forces a new resource to be created.

-> **NOTE:** The `admin_enabled` should be `true` in order to associate the Container Registry to this Machine Learning Workspace.
Expand All @@ -374,6 +376,8 @@ The following arguments are supported:

* `encryption` - (Optional) An `encryption` block as defined below. Changing this forces a new resource to be created.

* `feature_store` - (Optional) A `feature_store` block as defined below.

* `friendly_name` - (Optional) Display name for this Machine Learning Workspace.

* `high_business_impact` - (Optional) Flag to signal High Business Impact (HBI) data in the workspace and reduce diagnostic data collected by the service. Changing this forces a new resource to be created.
Expand Down Expand Up @@ -408,6 +412,19 @@ An `encryption` block supports the following:

~> **Note:** `user_assigned_identity_id` must be set when `identity.type` is `UserAssigned`, or the service won't be able to find the assigned permissions.

---

A `feature_store` block supports the following:

* `computer_spark_runtime_version` - (Optional) The version of the Spark runtime.

* `offline_connection_name` - (Optional) The name of offline store connection.

* `online_connection_name` - (Optional) The name of online store connection.

~> **Note:** `feature_store` must be set when `kind` is `FeatureStore`.


## Attributes Reference

In addition to the Arguments listed above - the following Attributes are exported:
Expand Down

0 comments on commit f48d0e4

Please sign in to comment.