diff --git a/internal/services/batch/batch_account_resource.go b/internal/services/batch/batch_account_resource.go index 1802f170541e..edb47c363277 100644 --- a/internal/services/batch/batch_account_resource.go +++ b/internal/services/batch/batch_account_resource.go @@ -222,7 +222,7 @@ func resourceBatchAccountCreate(d *pluginsdk.ResourceData, meta interface{}) err Location: location, Properties: &batchaccount.BatchAccountCreateProperties{ PoolAllocationMode: &poolAllocationMode, - PublicNetworkAccess: utils.ToPtr(batchaccount.PublicNetworkAccessTypeEnabled), + PublicNetworkAccess: pointer.To(batchaccount.PublicNetworkAccessTypeEnabled), Encryption: encryption, AllowedAuthenticationModes: expandAllowedAuthenticationModes(d.Get("allowed_authentication_modes").(*pluginsdk.Set).List()), }, @@ -231,7 +231,7 @@ func resourceBatchAccountCreate(d *pluginsdk.ResourceData, meta interface{}) err } if enabled := d.Get("public_network_access_enabled").(bool); !enabled { - parameters.Properties.PublicNetworkAccess = utils.ToPtr(batchaccount.PublicNetworkAccessTypeDisabled) + parameters.Properties.PublicNetworkAccess = pointer.To(batchaccount.PublicNetworkAccessTypeDisabled) } if v, ok := d.GetOk("network_profile"); ok { @@ -273,7 +273,7 @@ func resourceBatchAccountCreate(d *pluginsdk.ResourceData, meta interface{}) err } parameters.Properties.AutoStorage = &batchaccount.AutoStorageBaseProperties{ StorageAccountId: &storageAccountId, - AuthenticationMode: utils.ToPtr(batchaccount.AutoStorageAuthenticationMode(authMode)), + AuthenticationMode: pointer.To(batchaccount.AutoStorageAuthenticationMode(authMode)), } } @@ -422,9 +422,9 @@ func resourceBatchAccountUpdate(d *pluginsdk.ResourceData, meta interface{}) err if d.HasChange("public_network_access_enabled") { if d.Get("public_network_access_enabled").(bool) { - parameters.Properties.PublicNetworkAccess = utils.ToPtr(batchaccount.PublicNetworkAccessTypeEnabled) + parameters.Properties.PublicNetworkAccess = pointer.To(batchaccount.PublicNetworkAccessTypeEnabled) } else { - parameters.Properties.PublicNetworkAccess = utils.ToPtr(batchaccount.PublicNetworkAccessTypeDisabled) + parameters.Properties.PublicNetworkAccess = pointer.To(batchaccount.PublicNetworkAccessTypeDisabled) } } @@ -453,7 +453,7 @@ func resourceBatchAccountUpdate(d *pluginsdk.ResourceData, meta interface{}) err if storageAccountId != "" { parameters.Properties.AutoStorage = &batchaccount.AutoStorageBaseProperties{ StorageAccountId: &storageAccountId, - AuthenticationMode: utils.ToPtr(batchaccount.AutoStorageAuthenticationMode(authMode)), + AuthenticationMode: pointer.To(batchaccount.AutoStorageAuthenticationMode(authMode)), } } @@ -492,7 +492,7 @@ func resourceBatchAccountDelete(d *pluginsdk.ResourceData, meta interface{}) err func expandEncryption(e []interface{}) *batchaccount.EncryptionProperties { defaultEnc := batchaccount.EncryptionProperties{ - KeySource: utils.ToPtr(batchaccount.KeySourceMicrosoftPointBatch), + KeySource: pointer.To(batchaccount.KeySourceMicrosoftPointBatch), } if len(e) == 0 || e[0] == nil { @@ -502,7 +502,7 @@ func expandEncryption(e []interface{}) *batchaccount.EncryptionProperties { v := e[0].(map[string]interface{}) keyId := v["key_vault_key_id"].(string) encryptionProperty := batchaccount.EncryptionProperties{ - KeySource: utils.ToPtr(batchaccount.KeySourceMicrosoftPointKeyVault), + KeySource: pointer.To(batchaccount.KeySourceMicrosoftPointKeyVault), KeyVaultProperties: &batchaccount.KeyVaultProperties{ KeyIdentifier: &keyId, }, diff --git 
a/internal/services/bot/bot_service_azure_bot_resource_test.go b/internal/services/bot/bot_service_azure_bot_resource_test.go index 29f0553b2ba5..33682627fc25 100644 --- a/internal/services/bot/bot_service_azure_bot_resource_test.go +++ b/internal/services/bot/bot_service_azure_bot_resource_test.go @@ -189,6 +189,7 @@ resource "azurerm_bot_service_azure_bot" "test" { microsoft_app_id = data.azurerm_client_config.current.client_id sku = "F0" local_authentication_enabled = false + public_network_access_enabled = false icon_url = "https://registry.terraform.io/images/providers/azure.png" endpoint = "https://example.com" developer_app_insights_api_key = azurerm_application_insights_api_key.test.api_key diff --git a/internal/services/bot/bot_service_resource_base.go b/internal/services/bot/bot_service_resource_base.go index d639ea816e45..1942543aca8b 100644 --- a/internal/services/bot/bot_service_resource_base.go +++ b/internal/services/bot/bot_service_resource_base.go @@ -130,6 +130,12 @@ func (br botBaseResource) arguments(fields map[string]*pluginsdk.Schema) map[str ValidateFunc: validation.StringIsNotEmpty, }, + "public_network_access_enabled": { + Type: pluginsdk.TypeBool, + Optional: true, + Default: true, + }, + "streaming_endpoint_enabled": { Type: pluginsdk.TypeBool, Optional: true, @@ -181,6 +187,11 @@ func (br botBaseResource) createFunc(resourceName, botKind string) sdk.ResourceF displayName = id.Name } + publicNetworkEnabled := botservice.PublicNetworkAccessEnabled + if !metadata.ResourceData.Get("public_network_access_enabled").(bool) { + publicNetworkEnabled = botservice.PublicNetworkAccessDisabled + } + props := botservice.Bot{ Location: utils.String(metadata.ResourceData.Get("location").(string)), Sku: &botservice.Sku{ @@ -188,17 +199,18 @@ func (br botBaseResource) createFunc(resourceName, botKind string) sdk.ResourceF }, Kind: botservice.Kind(botKind), Properties: &botservice.BotProperties{ - DisplayName: utils.String(displayName), - Endpoint: utils.String(metadata.ResourceData.Get("endpoint").(string)), - MsaAppID: utils.String(metadata.ResourceData.Get("microsoft_app_id").(string)), - DeveloperAppInsightKey: utils.String(metadata.ResourceData.Get("developer_app_insights_key").(string)), - DeveloperAppInsightsAPIKey: utils.String(metadata.ResourceData.Get("developer_app_insights_api_key").(string)), - DeveloperAppInsightsApplicationID: utils.String(metadata.ResourceData.Get("developer_app_insights_application_id").(string)), - DisableLocalAuth: utils.Bool(!metadata.ResourceData.Get("local_authentication_enabled").(bool)), + DisplayName: pointer.To(displayName), + Endpoint: pointer.To(metadata.ResourceData.Get("endpoint").(string)), + MsaAppID: pointer.To(metadata.ResourceData.Get("microsoft_app_id").(string)), + DeveloperAppInsightKey: pointer.To(metadata.ResourceData.Get("developer_app_insights_key").(string)), + DeveloperAppInsightsAPIKey: pointer.To(metadata.ResourceData.Get("developer_app_insights_api_key").(string)), + DeveloperAppInsightsApplicationID: pointer.To(metadata.ResourceData.Get("developer_app_insights_application_id").(string)), + DisableLocalAuth: pointer.To(!metadata.ResourceData.Get("local_authentication_enabled").(bool)), LuisAppIds: utils.ExpandStringSlice(metadata.ResourceData.Get("luis_app_ids").([]interface{})), - LuisKey: utils.String(metadata.ResourceData.Get("luis_key").(string)), - IsStreamingSupported: utils.Bool(metadata.ResourceData.Get("streaming_endpoint_enabled").(bool)), - IconURL: 
utils.String(metadata.ResourceData.Get("icon_url").(string)), + LuisKey: pointer.To(metadata.ResourceData.Get("luis_key").(string)), + PublicNetworkAccess: publicNetworkEnabled, + IsStreamingSupported: pointer.To(metadata.ResourceData.Get("streaming_endpoint_enabled").(bool)), + IconURL: pointer.To(metadata.ResourceData.Get("icon_url").(string)), }, Tags: tags.Expand(metadata.ResourceData.Get("tags").(map[string]interface{})), } @@ -208,11 +220,11 @@ func (br botBaseResource) createFunc(resourceName, botKind string) sdk.ResourceF } if v, ok := metadata.ResourceData.GetOk("microsoft_app_tenant_id"); ok { - props.Properties.MsaAppTenantID = utils.String(v.(string)) + props.Properties.MsaAppTenantID = pointer.To(v.(string)) } if v, ok := metadata.ResourceData.GetOk("microsoft_app_msi_id"); ok { - props.Properties.MsaAppMSIResourceID = utils.String(v.(string)) + props.Properties.MsaAppMSIResourceID = pointer.To(v.(string)) } if _, err := client.Create(ctx, id.ResourceGroup, id.Name, props); err != nil { @@ -225,6 +237,78 @@ func (br botBaseResource) createFunc(resourceName, botKind string) sdk.ResourceF } } +func (br botBaseResource) updateFunc() sdk.ResourceFunc { + return sdk.ResourceFunc{ + Timeout: 30 * time.Minute, + Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { + client := metadata.Client.Bot.BotClient + id, err := parse.BotServiceID(metadata.ResourceData.Id()) + if err != nil { + return err + } + + existing, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + return fmt.Errorf("retrieving %s: %+v", *id, err) + } + + if metadata.ResourceData.HasChange("display_name") { + existing.Properties.DisplayName = utils.String(metadata.ResourceData.Get("display_name").(string)) + } + + if metadata.ResourceData.HasChange("endpoint") { + existing.Properties.Endpoint = utils.String(metadata.ResourceData.Get("endpoint").(string)) + } + + if metadata.ResourceData.HasChange("developer_app_insights_key") { + existing.Properties.DeveloperAppInsightKey = utils.String(metadata.ResourceData.Get("developer_app_insights_key").(string)) + } + + if metadata.ResourceData.HasChange("developer_app_insights_api_key") { + existing.Properties.DeveloperAppInsightsAPIKey = utils.String(metadata.ResourceData.Get("developer_app_insights_api_key").(string)) + } + + if metadata.ResourceData.HasChange("developer_app_insights_application_id") { + existing.Properties.DeveloperAppInsightsApplicationID = utils.String(metadata.ResourceData.Get("developer_app_insights_application_id").(string)) + } + + if metadata.ResourceData.HasChange("local_authentication_enabled") { + existing.Properties.DisableLocalAuth = utils.Bool(!metadata.ResourceData.Get("local_authentication_enabled").(bool)) + } + + if metadata.ResourceData.HasChange("luis_app_ids") { + existing.Properties.LuisAppIds = utils.ExpandStringSlice(metadata.ResourceData.Get("luis_app_ids").([]interface{})) + } + + if metadata.ResourceData.HasChange("luis_key") { + existing.Properties.LuisKey = utils.String(metadata.ResourceData.Get("luis_key").(string)) + } + + if metadata.ResourceData.HasChange("public_network_access_enabled") { + if metadata.ResourceData.Get("public_network_access_enabled").(bool) { + existing.Properties.PublicNetworkAccess = botservice.PublicNetworkAccessEnabled + } else { + existing.Properties.PublicNetworkAccess = botservice.PublicNetworkAccessDisabled + } + } + + if 
metadata.ResourceData.HasChange("streaming_endpoint_enabled") { + existing.Properties.IsStreamingSupported = utils.Bool(metadata.ResourceData.Get("streaming_endpoint_enabled").(bool)) + } + + if metadata.ResourceData.HasChange("icon_url") { + existing.Properties.IconURL = utils.String(metadata.ResourceData.Get("icon_url").(string)) + } + + if _, err := client.Update(ctx, id.ResourceGroup, id.Name, existing); err != nil { + return fmt.Errorf("updating %s: %+v", *id, err) + } + + return nil + }, + } +} + func (br botBaseResource) readFunc() sdk.ResourceFunc { return sdk.ResourceFunc{ Timeout: 5 * time.Minute, @@ -317,6 +402,12 @@ func (br botBaseResource) readFunc() sdk.ResourceFunc { } metadata.ResourceData.Set("local_authentication_enabled", localAuthEnabled) + publicNetworkAccessEnabled := true + if v := props.PublicNetworkAccess; v == botservice.PublicNetworkAccessDisabled { + publicNetworkAccessEnabled = false + } + metadata.ResourceData.Set("public_network_access_enabled", publicNetworkAccessEnabled) + var luisAppIds []string if v := props.LuisAppIds; v != nil { luisAppIds = *v @@ -356,70 +447,6 @@ func (br botBaseResource) deleteFunc() sdk.ResourceFunc { } } -func (br botBaseResource) updateFunc() sdk.ResourceFunc { - return sdk.ResourceFunc{ - Timeout: 30 * time.Minute, - Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error { - client := metadata.Client.Bot.BotClient - id, err := parse.BotServiceID(metadata.ResourceData.Id()) - if err != nil { - return err - } - - existing, err := client.Get(ctx, id.ResourceGroup, id.Name) - if err != nil { - return fmt.Errorf("retrieving %s: %+v", *id, err) - } - - if metadata.ResourceData.HasChange("display_name") { - existing.Properties.DisplayName = utils.String(metadata.ResourceData.Get("display_name").(string)) - } - - if metadata.ResourceData.HasChange("endpoint") { - existing.Properties.Endpoint = utils.String(metadata.ResourceData.Get("endpoint").(string)) - } - - if metadata.ResourceData.HasChange("developer_app_insights_key") { - existing.Properties.DeveloperAppInsightKey = utils.String(metadata.ResourceData.Get("developer_app_insights_key").(string)) - } - - if metadata.ResourceData.HasChange("developer_app_insights_api_key") { - existing.Properties.DeveloperAppInsightsAPIKey = utils.String(metadata.ResourceData.Get("developer_app_insights_api_key").(string)) - } - - if metadata.ResourceData.HasChange("developer_app_insights_application_id") { - existing.Properties.DeveloperAppInsightsApplicationID = utils.String(metadata.ResourceData.Get("developer_app_insights_application_id").(string)) - } - - if metadata.ResourceData.HasChange("local_authentication_enabled") { - existing.Properties.DisableLocalAuth = utils.Bool(!metadata.ResourceData.Get("local_authentication_enabled").(bool)) - } - - if metadata.ResourceData.HasChange("luis_app_ids") { - existing.Properties.LuisAppIds = utils.ExpandStringSlice(metadata.ResourceData.Get("luis_app_ids").([]interface{})) - } - - if metadata.ResourceData.HasChange("luis_key") { - existing.Properties.LuisKey = utils.String(metadata.ResourceData.Get("luis_key").(string)) - } - - if metadata.ResourceData.HasChange("streaming_endpoint_enabled") { - existing.Properties.IsStreamingSupported = utils.Bool(metadata.ResourceData.Get("streaming_endpoint_enabled").(bool)) - } - - if metadata.ResourceData.HasChange("icon_url") { - existing.Properties.IconURL = utils.String(metadata.ResourceData.Get("icon_url").(string)) - } - - if _, err := client.Update(ctx, id.ResourceGroup, id.Name, existing); err 
!= nil { - return fmt.Errorf("updating %s: %+v", *id, err) - } - - return nil - }, - } -} - func (br botBaseResource) importerFunc(expectKind string) sdk.ResourceRunFunc { return func(ctx context.Context, metadata sdk.ResourceMetaData) error { client := metadata.Client.Bot.BotClient diff --git a/internal/services/containers/kubernetes_addons.go b/internal/services/containers/kubernetes_addons.go index b11e47a16ce9..50269332f764 100644 --- a/internal/services/containers/kubernetes_addons.go +++ b/internal/services/containers/kubernetes_addons.go @@ -8,6 +8,7 @@ import ( "strings" "github.com/Azure/go-autorest/autorest/azure" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" "github.com/hashicorp/go-azure-sdk/resource-manager/containerservice/2023-04-02-preview/managedclusters" "github.com/hashicorp/go-azure-sdk/resource-manager/operationalinsights/2020-08-01/workspaces" @@ -16,7 +17,6 @@ import ( applicationGatewayValidate "github.com/hashicorp/terraform-provider-azurerm/internal/services/network/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) const ( @@ -358,7 +358,7 @@ func expandKubernetesAddOns(d *pluginsdk.ResourceData, input map[string]interfac v := input["azure_policy_enabled"].(bool) props := managedclusters.ManagedClusterAddonProfile{ Enabled: v, - Config: utils.ToPtr(map[string]string{ + Config: pointer.To(map[string]string{ "version": "v2", }), } diff --git a/internal/services/containers/kubernetes_cluster_node_pool_resource.go b/internal/services/containers/kubernetes_cluster_node_pool_resource.go index 4e8c8ad1bce6..400afb573278 100644 --- a/internal/services/containers/kubernetes_cluster_node_pool_resource.go +++ b/internal/services/containers/kubernetes_cluster_node_pool_resource.go @@ -11,6 +11,7 @@ import ( "strconv" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" @@ -461,19 +462,19 @@ func resourceKubernetesClusterNodePoolCreate(d *pluginsdk.ResourceData, meta int t := d.Get("tags").(map[string]interface{}) profile := agentpools.ManagedClusterAgentPoolProfileProperties{ - OsType: utils.ToPtr(agentpools.OSType(osType)), - EnableAutoScaling: utils.Bool(enableAutoScaling), - EnableCustomCATrust: utils.Bool(d.Get("custom_ca_trust_enabled").(bool)), - EnableFIPS: utils.Bool(d.Get("fips_enabled").(bool)), - EnableEncryptionAtHost: utils.Bool(d.Get("enable_host_encryption").(bool)), - EnableUltraSSD: utils.Bool(d.Get("ultra_ssd_enabled").(bool)), - EnableNodePublicIP: utils.Bool(d.Get("enable_node_public_ip").(bool)), - KubeletDiskType: utils.ToPtr(agentpools.KubeletDiskType(d.Get("kubelet_disk_type").(string))), - Mode: utils.ToPtr(mode), - ScaleSetPriority: utils.ToPtr(agentpools.ScaleSetPriority(d.Get("priority").(string))), + OsType: pointer.To(agentpools.OSType(osType)), + EnableAutoScaling: pointer.To(enableAutoScaling), + EnableCustomCATrust: pointer.To(d.Get("custom_ca_trust_enabled").(bool)), + EnableFIPS: pointer.To(d.Get("fips_enabled").(bool)), + EnableEncryptionAtHost: pointer.To(d.Get("enable_host_encryption").(bool)), + EnableUltraSSD: pointer.To(d.Get("ultra_ssd_enabled").(bool)), + EnableNodePublicIP: 
pointer.To(d.Get("enable_node_public_ip").(bool)), + KubeletDiskType: pointer.To(agentpools.KubeletDiskType(d.Get("kubelet_disk_type").(string))), + Mode: pointer.To(mode), + ScaleSetPriority: pointer.To(agentpools.ScaleSetPriority(d.Get("priority").(string))), Tags: tags.Expand(t), - Type: utils.ToPtr(agentpools.AgentPoolTypeVirtualMachineScaleSets), - VMSize: utils.String(d.Get("vm_size").(string)), + Type: pointer.To(agentpools.AgentPoolTypeVirtualMachineScaleSets), + VMSize: pointer.To(d.Get("vm_size").(string)), UpgradeSettings: expandAgentPoolUpgradeSettings(d.Get("upgrade_settings").([]interface{})), WindowsProfile: expandAgentPoolWindowsProfile(d.Get("windows_profile").([]interface{})), @@ -482,23 +483,23 @@ func resourceKubernetesClusterNodePoolCreate(d *pluginsdk.ResourceData, meta int } if gpuInstanceProfile := d.Get("gpu_instance").(string); gpuInstanceProfile != "" { - profile.GpuInstanceProfile = utils.ToPtr(agentpools.GPUInstanceProfile(gpuInstanceProfile)) + profile.GpuInstanceProfile = pointer.To(agentpools.GPUInstanceProfile(gpuInstanceProfile)) } if osSku := d.Get("os_sku").(string); osSku != "" { - profile.OsSKU = utils.ToPtr(agentpools.OSSKU(osSku)) + profile.OsSKU = pointer.To(agentpools.OSSKU(osSku)) } if scaleDownMode := d.Get("scale_down_mode").(string); scaleDownMode != "" { - profile.ScaleDownMode = utils.ToPtr(agentpools.ScaleDownMode(scaleDownMode)) + profile.ScaleDownMode = pointer.To(agentpools.ScaleDownMode(scaleDownMode)) } if workloadRuntime := d.Get("workload_runtime").(string); workloadRuntime != "" { - profile.WorkloadRuntime = utils.ToPtr(agentpools.WorkloadRuntime(workloadRuntime)) + profile.WorkloadRuntime = pointer.To(agentpools.WorkloadRuntime(workloadRuntime)) } if priority == string(managedclusters.ScaleSetPrioritySpot) { - profile.ScaleSetEvictionPolicy = utils.ToPtr(agentpools.ScaleSetEvictionPolicy(evictionPolicy)) + profile.ScaleSetEvictionPolicy = pointer.To(agentpools.ScaleSetEvictionPolicy(evictionPolicy)) profile.SpotMaxPrice = utils.Float(spotMaxPrice) } else { if evictionPolicy != "" { @@ -560,7 +561,7 @@ func resourceKubernetesClusterNodePoolCreate(d *pluginsdk.ResourceData, meta int } if osDiskType := d.Get("os_disk_type").(string); osDiskType != "" { - profile.OsDiskType = utils.ToPtr(agentpools.OSDiskType(osDiskType)) + profile.OsDiskType = pointer.To(agentpools.OSDiskType(osDiskType)) } if podSubnetID := d.Get("pod_subnet_id").(string); podSubnetID != "" { diff --git a/internal/services/containers/kubernetes_cluster_resource.go b/internal/services/containers/kubernetes_cluster_resource.go index fb111cad461d..10498877e63f 100644 --- a/internal/services/containers/kubernetes_cluster_resource.go +++ b/internal/services/containers/kubernetes_cluster_resource.go @@ -1749,8 +1749,8 @@ func resourceKubernetesClusterCreate(d *pluginsdk.ResourceData, meta interface{} ExtendedLocation: expandEdgeZone(d.Get("edge_zone").(string)), Location: location, Sku: &managedclusters.ManagedClusterSKU{ - Name: utils.ToPtr(managedclusters.ManagedClusterSKUNameBase), // the only possible value at this point - Tier: utils.ToPtr(managedclusters.ManagedClusterSKUTier(d.Get("sku_tier").(string))), + Name: pointer.To(managedclusters.ManagedClusterSKUNameBase), // the only possible value at this point + Tier: pointer.To(managedclusters.ManagedClusterSKUTier(d.Get("sku_tier").(string))), }, Properties: &managedclusters.ManagedClusterProperties{ ApiServerAccessProfile: apiAccessProfile, @@ -3270,19 +3270,19 @@ func expandKubernetesClusterNetworkProfile(input 
[]interface{}) (*managedcluster } networkProfile := managedclusters.ContainerServiceNetworkProfile{ - NetworkPlugin: utils.ToPtr(managedclusters.NetworkPlugin(networkPlugin)), - NetworkMode: utils.ToPtr(managedclusters.NetworkMode(networkMode)), - NetworkPolicy: utils.ToPtr(managedclusters.NetworkPolicy(networkPolicy)), - LoadBalancerSku: utils.ToPtr(managedclusters.LoadBalancerSku(loadBalancerSku)), - OutboundType: utils.ToPtr(managedclusters.OutboundType(outboundType)), + NetworkPlugin: pointer.To(managedclusters.NetworkPlugin(networkPlugin)), + NetworkMode: pointer.To(managedclusters.NetworkMode(networkMode)), + NetworkPolicy: pointer.To(managedclusters.NetworkPolicy(networkPolicy)), + LoadBalancerSku: pointer.To(managedclusters.LoadBalancerSku(loadBalancerSku)), + OutboundType: pointer.To(managedclusters.OutboundType(outboundType)), IPFamilies: ipVersions, } if ebpfDataPlane := config["ebpf_data_plane"].(string); ebpfDataPlane != "" { - networkProfile.NetworkDataplane = utils.ToPtr(managedclusters.NetworkDataplane(ebpfDataPlane)) + networkProfile.NetworkDataplane = pointer.To(managedclusters.NetworkDataplane(ebpfDataPlane)) } if networkPluginMode := config["network_plugin_mode"].(string); networkPluginMode != "" { - networkProfile.NetworkPluginMode = utils.ToPtr(managedclusters.NetworkPluginMode(networkPluginMode)) + networkProfile.NetworkPluginMode = pointer.To(managedclusters.NetworkPluginMode(networkPluginMode)) } if len(loadBalancerProfileRaw) > 0 { @@ -3929,7 +3929,7 @@ func expandKubernetesClusterAutoScalerProfile(input []interface{}) *managedclust return &managedclusters.ManagedClusterPropertiesAutoScalerProfile{ BalanceSimilarNodeGroups: utils.String(strconv.FormatBool(balanceSimilarNodeGroups)), - Expander: utils.ToPtr(managedclusters.Expander(expander)), + Expander: pointer.To(managedclusters.Expander(expander)), MaxGracefulTerminationSec: utils.String(maxGracefulTerminationSec), MaxNodeProvisionTime: utils.String(maxNodeProvisionTime), MaxTotalUnreadyPercentage: utils.String(maxUnreadyPercentage), @@ -4130,8 +4130,8 @@ func expandKubernetesClusterMaintenanceConfigurationTimeSpans(input []interface{ start, _ := time.Parse(time.RFC3339, v["start"].(string)) end, _ := time.Parse(time.RFC3339, v["end"].(string)) results = append(results, maintenanceconfigurations.TimeSpan{ - Start: utils.ToPtr(start.Format("2006-01-02T15:04:05Z07:00")), - End: utils.ToPtr(end.Format("2006-01-02T15:04:05Z07:00")), + Start: pointer.To(start.Format("2006-01-02T15:04:05Z07:00")), + End: pointer.To(end.Format("2006-01-02T15:04:05Z07:00")), }) } return &results @@ -4156,7 +4156,7 @@ func expandKubernetesClusterMaintenanceConfigurationTimeInWeeks(input []interfac for _, item := range input { v := item.(map[string]interface{}) results = append(results, maintenanceconfigurations.TimeInWeek{ - Day: utils.ToPtr(maintenanceconfigurations.WeekDay(v["day"].(string))), + Day: pointer.To(maintenanceconfigurations.WeekDay(v["day"].(string))), HourSlots: utils.ExpandInt64Slice(v["hours"].(*pluginsdk.Set).List()), }) } diff --git a/internal/services/containers/kubernetes_nodepool.go b/internal/services/containers/kubernetes_nodepool.go index 3dbeada0a889..ab286512517d 100644 --- a/internal/services/containers/kubernetes_nodepool.go +++ b/internal/services/containers/kubernetes_nodepool.go @@ -10,6 +10,7 @@ import ( "strconv" "strings" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" 
"github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/tags" @@ -871,7 +872,7 @@ func ConvertDefaultNodePoolToAgentPool(input *[]managedclusters.ManagedClusterAg TransparentHugePageEnabled: linuxOsConfigRaw.TransparentHugePageEnabled, } if sysctlsRaw := linuxOsConfigRaw.Sysctls; sysctlsRaw != nil { - linuxOsConfig.Sysctls = utils.ToPtr(agentpools.SysctlConfig(*sysctlsRaw)) + linuxOsConfig.Sysctls = pointer.To(agentpools.SysctlConfig(*sysctlsRaw)) } agentpool.Properties.LinuxOSConfig = &linuxOsConfig } @@ -890,35 +891,35 @@ func ConvertDefaultNodePoolToAgentPool(input *[]managedclusters.ManagedClusterAg agentpool.Properties.NetworkProfile = &networkProfile } if osTypeNodePool := defaultCluster.OsType; osTypeNodePool != nil { - agentpool.Properties.OsType = utils.ToPtr(agentpools.OSType(string(*osTypeNodePool))) + agentpool.Properties.OsType = pointer.To(agentpools.OSType(string(*osTypeNodePool))) } if osSku := defaultCluster.OsSKU; osSku != nil { - agentpool.Properties.OsSKU = utils.ToPtr(agentpools.OSSKU(*osSku)) + agentpool.Properties.OsSKU = pointer.To(agentpools.OSSKU(*osSku)) } if kubeletDiskTypeNodePool := defaultCluster.KubeletDiskType; kubeletDiskTypeNodePool != nil { - agentpool.Properties.KubeletDiskType = utils.ToPtr(agentpools.KubeletDiskType(string(*kubeletDiskTypeNodePool))) + agentpool.Properties.KubeletDiskType = pointer.To(agentpools.KubeletDiskType(string(*kubeletDiskTypeNodePool))) } if agentPoolTypeNodePool := defaultCluster.Type; agentPoolTypeNodePool != nil { - agentpool.Properties.Type = utils.ToPtr(agentpools.AgentPoolType(string(*agentPoolTypeNodePool))) + agentpool.Properties.Type = pointer.To(agentpools.AgentPoolType(string(*agentPoolTypeNodePool))) } if scaleSetPriorityNodePool := defaultCluster.ScaleSetPriority; scaleSetPriorityNodePool != nil { - agentpool.Properties.ScaleSetPriority = utils.ToPtr(agentpools.ScaleSetPriority(string(*scaleSetPriorityNodePool))) + agentpool.Properties.ScaleSetPriority = pointer.To(agentpools.ScaleSetPriority(string(*scaleSetPriorityNodePool))) } if scaleSetEvictionPolicyNodePool := defaultCluster.ScaleSetEvictionPolicy; scaleSetEvictionPolicyNodePool != nil { - agentpool.Properties.ScaleSetEvictionPolicy = utils.ToPtr(agentpools.ScaleSetEvictionPolicy(string(*scaleSetEvictionPolicyNodePool))) + agentpool.Properties.ScaleSetEvictionPolicy = pointer.To(agentpools.ScaleSetEvictionPolicy(string(*scaleSetEvictionPolicyNodePool))) } if modeNodePool := defaultCluster.Mode; modeNodePool != nil { - agentpool.Properties.Mode = utils.ToPtr(agentpools.AgentPoolMode(string(*modeNodePool))) + agentpool.Properties.Mode = pointer.To(agentpools.AgentPoolMode(string(*modeNodePool))) } if scaleDownModeNodePool := defaultCluster.ScaleDownMode; scaleDownModeNodePool != nil { - agentpool.Properties.ScaleDownMode = utils.ToPtr(agentpools.ScaleDownMode(string(*scaleDownModeNodePool))) + agentpool.Properties.ScaleDownMode = pointer.To(agentpools.ScaleDownMode(string(*scaleDownModeNodePool))) } agentpool.Properties.UpgradeSettings = &agentpools.AgentPoolUpgradeSettings{} if upgradeSettingsNodePool := defaultCluster.UpgradeSettings; upgradeSettingsNodePool != nil && upgradeSettingsNodePool.MaxSurge != nil && *upgradeSettingsNodePool.MaxSurge != "" { agentpool.Properties.UpgradeSettings.MaxSurge = upgradeSettingsNodePool.MaxSurge } if workloadRuntimeNodePool := defaultCluster.WorkloadRuntime; workloadRuntimeNodePool != nil { - agentpool.Properties.WorkloadRuntime = 
utils.ToPtr(agentpools.WorkloadRuntime(string(*workloadRuntimeNodePool))) + agentpool.Properties.WorkloadRuntime = pointer.To(agentpools.WorkloadRuntime(string(*workloadRuntimeNodePool))) } if creationData := defaultCluster.CreationData; creationData != nil { @@ -930,7 +931,7 @@ func ConvertDefaultNodePoolToAgentPool(input *[]managedclusters.ManagedClusterAg } if defaultCluster.GpuInstanceProfile != nil { - agentpool.Properties.GpuInstanceProfile = utils.ToPtr(agentpools.GPUInstanceProfile(*defaultCluster.GpuInstanceProfile)) + agentpool.Properties.GpuInstanceProfile = pointer.To(agentpools.GPUInstanceProfile(*defaultCluster.GpuInstanceProfile)) } return agentpool @@ -963,23 +964,23 @@ func ExpandDefaultNodePool(d *pluginsdk.ResourceData) (*[]managedclusters.Manage EnableFIPS: utils.Bool(raw["fips_enabled"].(bool)), EnableNodePublicIP: utils.Bool(raw["enable_node_public_ip"].(bool)), EnableEncryptionAtHost: utils.Bool(raw["enable_host_encryption"].(bool)), - KubeletDiskType: utils.ToPtr(managedclusters.KubeletDiskType(raw["kubelet_disk_type"].(string))), + KubeletDiskType: pointer.To(managedclusters.KubeletDiskType(raw["kubelet_disk_type"].(string))), Name: raw["name"].(string), NodeLabels: nodeLabels, NodeTaints: nodeTaints, Tags: tags.Expand(t), - Type: utils.ToPtr(managedclusters.AgentPoolType(raw["type"].(string))), + Type: pointer.To(managedclusters.AgentPoolType(raw["type"].(string))), VMSize: utils.String(raw["vm_size"].(string)), // at this time the default node pool has to be Linux or the AKS cluster fails to provision with: // Pods not in Running status: coredns-7fc597cc45-v5z7x,coredns-autoscaler-7ccc76bfbd-djl7j,metrics-server-cbd95f966-5rl97,tunnelfront-7d9884977b-wpbvn // Windows agents can be configured via the separate node pool resource - OsType: utils.ToPtr(managedclusters.OSTypeLinux), + OsType: pointer.To(managedclusters.OSTypeLinux), // without this set the API returns: // Code="MustDefineAtLeastOneSystemPool" Message="Must define at least one system pool." 
// since this is the "default" node pool we can assume this is a system node pool - Mode: utils.ToPtr(managedclusters.AgentPoolModeSystem), + Mode: pointer.To(managedclusters.AgentPoolModeSystem), UpgradeSettings: expandClusterNodePoolUpgradeSettings(raw["upgrade_settings"].([]interface{})), @@ -1010,13 +1011,13 @@ func ExpandDefaultNodePool(d *pluginsdk.ResourceData) (*[]managedclusters.Manage profile.OsDiskSizeGB = utils.Int64(osDiskSizeGB) } - profile.OsDiskType = utils.ToPtr(managedclusters.OSDiskTypeManaged) + profile.OsDiskType = pointer.To(managedclusters.OSDiskTypeManaged) if osDiskType := raw["os_disk_type"].(string); osDiskType != "" { - profile.OsDiskType = utils.ToPtr(managedclusters.OSDiskType(osDiskType)) + profile.OsDiskType = pointer.To(managedclusters.OSDiskType(osDiskType)) } if osSku := raw["os_sku"].(string); osSku != "" { - profile.OsSKU = utils.ToPtr(managedclusters.OSSKU(osSku)) + profile.OsSKU = pointer.To(managedclusters.OSSKU(osSku)) } if podSubnetID := raw["pod_subnet_id"].(string); podSubnetID != "" { @@ -1026,7 +1027,7 @@ func ExpandDefaultNodePool(d *pluginsdk.ResourceData) (*[]managedclusters.Manage scaleDownModeDelete := managedclusters.ScaleDownModeDelete profile.ScaleDownMode = &scaleDownModeDelete if scaleDownMode := raw["scale_down_mode"].(string); scaleDownMode != "" { - profile.ScaleDownMode = utils.ToPtr(managedclusters.ScaleDownMode(scaleDownMode)) + profile.ScaleDownMode = pointer.To(managedclusters.ScaleDownMode(scaleDownMode)) } if snapshotId := raw["snapshot_id"].(string); snapshotId != "" { @@ -1056,7 +1057,7 @@ func ExpandDefaultNodePool(d *pluginsdk.ResourceData) (*[]managedclusters.Manage } if workloadRunTime := raw["workload_runtime"].(string); workloadRunTime != "" { - profile.WorkloadRuntime = utils.ToPtr(managedclusters.WorkloadRuntime(workloadRunTime)) + profile.WorkloadRuntime = pointer.To(managedclusters.WorkloadRuntime(workloadRunTime)) } if capacityReservationGroupId := raw["capacity_reservation_group_id"].(string); capacityReservationGroupId != "" { @@ -1064,7 +1065,7 @@ func ExpandDefaultNodePool(d *pluginsdk.ResourceData) (*[]managedclusters.Manage } if gpuInstanceProfile := raw["gpu_instance"].(string); gpuInstanceProfile != "" { - profile.GpuInstanceProfile = utils.ToPtr(managedclusters.GPUInstanceProfile(gpuInstanceProfile)) + profile.GpuInstanceProfile = pointer.To(managedclusters.GPUInstanceProfile(gpuInstanceProfile)) } count := raw["node_count"].(int) diff --git a/internal/services/cosmos/cosmosdb_mongo_collection_resource.go b/internal/services/cosmos/cosmosdb_mongo_collection_resource.go index e55f0e520e59..adaa4f527f0a 100644 --- a/internal/services/cosmos/cosmosdb_mongo_collection_resource.go +++ b/internal/services/cosmos/cosmosdb_mongo_collection_resource.go @@ -10,6 +10,7 @@ import ( "time" "github.com/Azure/azure-sdk-for-go/services/cosmos-db/mgmt/2021-10-15/documentdb" // nolint: staticcheck + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" @@ -170,7 +171,7 @@ func resourceCosmosDbMongoCollectionCreate(d *pluginsdk.ResourceData, meta inter var ttl *int if v, ok := d.GetOk("default_ttl_seconds"); ok { - ttl = utils.Int(v.(int)) + ttl = pointer.To(v.(int)) } indexes, hasIdKey := expandCosmosMongoCollectionIndex(d.Get("index").(*pluginsdk.Set).List(), ttl) @@ -189,7 +190,7 @@ func resourceCosmosDbMongoCollectionCreate(d 
*pluginsdk.ResourceData, meta inter } if analyticalStorageTTL, ok := d.GetOk("analytical_storage_ttl"); ok { - db.MongoDBCollectionCreateUpdateProperties.Resource.AnalyticalStorageTTL = utils.Int32(int32(analyticalStorageTTL.(int))) + db.MongoDBCollectionCreateUpdateProperties.Resource.AnalyticalStorageTTL = pointer.To(int32(analyticalStorageTTL.(int))) } if throughput, hasThroughput := d.GetOk("throughput"); hasThroughput { @@ -239,7 +240,7 @@ func resourceCosmosDbMongoCollectionUpdate(d *pluginsdk.ResourceData, meta inter var ttl *int if v, ok := d.GetOk("default_ttl_seconds"); ok { - ttl = utils.Int(v.(int)) + ttl = pointer.To(v.(int)) } indexes, hasIdKey := expandCosmosMongoCollectionIndex(d.Get("index").(*pluginsdk.Set).List(), ttl) @@ -258,7 +259,7 @@ func resourceCosmosDbMongoCollectionUpdate(d *pluginsdk.ResourceData, meta inter } if analyticalStorageTTL, ok := d.GetOk("analytical_storage_ttl"); ok { - db.MongoDBCollectionCreateUpdateProperties.Resource.AnalyticalStorageTTL = utils.Int32(int32(analyticalStorageTTL.(int))) + db.MongoDBCollectionCreateUpdateProperties.Resource.AnalyticalStorageTTL = pointer.To(int32(analyticalStorageTTL.(int))) } if shardKey := d.Get("shard_key").(string); shardKey != "" { @@ -442,7 +443,7 @@ func expandCosmosMongoCollectionIndex(indexes []interface{}, defaultTtl *int) (* Keys: &[]string{"_ts"}, }, Options: &documentdb.MongoIndexOptions{ - ExpireAfterSeconds: utils.Int32(int32(*defaultTtl)), + ExpireAfterSeconds: pointer.To(int32(*defaultTtl)), }, }) } diff --git a/internal/services/costmanagement/anomaly_alert_resource.go b/internal/services/costmanagement/anomaly_alert_resource.go index fff42a260826..65fe55e93ed9 100644 --- a/internal/services/costmanagement/anomaly_alert_resource.go +++ b/internal/services/costmanagement/anomaly_alert_resource.go @@ -8,6 +8,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-sdk/resource-manager/costmanagement/2022-06-01-preview/scheduledactions" "github.com/hashicorp/terraform-provider-azurerm/internal/sdk" @@ -102,7 +103,7 @@ func (r AnomalyAlertResource) Create() sdk.ResourceFunc { schedule.SetStartDateAsTime(time.Now()) param := scheduledactions.ScheduledAction{ - Kind: utils.ToPtr(scheduledactions.ScheduledActionKindInsightAlert), + Kind: pointer.To(scheduledactions.ScheduledActionKindInsightAlert), Properties: &scheduledactions.ScheduledActionProperties{ DisplayName: metadata.ResourceData.Get("display_name").(string), Status: scheduledactions.ScheduledActionStatusEnabled, @@ -163,7 +164,7 @@ func (r AnomalyAlertResource) Update() sdk.ResourceFunc { schedule.SetStartDateAsTime(time.Now()) param := scheduledactions.ScheduledAction{ - Kind: utils.ToPtr(scheduledactions.ScheduledActionKindInsightAlert), + Kind: pointer.To(scheduledactions.ScheduledActionKindInsightAlert), ETag: resp.Model.ETag, Properties: &scheduledactions.ScheduledActionProperties{ DisplayName: metadata.ResourceData.Get("display_name").(string), diff --git a/internal/services/costmanagement/scheduled_action_resource.go b/internal/services/costmanagement/scheduled_action_resource.go index 0535f26e33bd..9c66404c9491 100644 --- a/internal/services/costmanagement/scheduled_action_resource.go +++ b/internal/services/costmanagement/scheduled_action_resource.go @@ -8,6 +8,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" 
"github.com/hashicorp/go-azure-sdk/resource-manager/costmanagement/2022-10-01/scheduledactions" "github.com/hashicorp/go-azure-sdk/resource-manager/costmanagement/2022-10-01/views" @@ -190,7 +191,7 @@ func (r CostManagementScheduledActionResource) Create() sdk.ResourceFunc { } props := scheduledactions.ScheduledAction{ - Kind: utils.ToPtr(scheduledactions.ScheduledActionKindEmail), + Kind: pointer.To(scheduledactions.ScheduledActionKindEmail), Properties: &scheduledactions.ScheduledActionProperties{ DisplayName: metadata.ResourceData.Get("display_name").(string), Status: scheduledactions.ScheduledActionStatusEnabled, diff --git a/internal/services/costmanagement/view_resource_base.go b/internal/services/costmanagement/view_resource_base.go index 041b062b12f9..9bbdcb28f6ac 100644 --- a/internal/services/costmanagement/view_resource_base.go +++ b/internal/services/costmanagement/view_resource_base.go @@ -192,9 +192,9 @@ func (br costManagementViewBaseResource) createFunc(resourceName, scopeFieldName props := views.View{ Properties: &views.ViewProperties{ - Accumulated: utils.ToPtr(accumulated), + Accumulated: pointer.To(accumulated), DisplayName: utils.String(metadata.ResourceData.Get("display_name").(string)), - Chart: utils.ToPtr(views.ChartType(metadata.ResourceData.Get("chart_type").(string))), + Chart: pointer.To(views.ChartType(metadata.ResourceData.Get("chart_type").(string))), Query: &views.ReportConfigDefinition{ DataSet: expandDataset(metadata.ResourceData.Get("dataset").([]interface{})), Timeframe: views.ReportTimeframeType(metadata.ResourceData.Get("timeframe").(string)), @@ -236,7 +236,7 @@ func (br costManagementViewBaseResource) readFunc(scopeFieldName string) sdk.Res } metadata.ResourceData.Set("name", id.ViewName) - //lintignore:R001 + // lintignore:R001 metadata.ResourceData.Set(scopeFieldName, id.Scope) if model := resp.Model; model != nil { @@ -321,7 +321,7 @@ func (br costManagementViewBaseResource) updateFunc() sdk.ResourceFunc { } if metadata.ResourceData.HasChange("chart_type") { - model.Properties.Chart = utils.ToPtr(views.ChartType(metadata.ResourceData.Get("chart_type").(string))) + model.Properties.Chart = pointer.To(views.ChartType(metadata.ResourceData.Get("chart_type").(string))) } if metadata.ResourceData.HasChange("dataset") { @@ -356,7 +356,7 @@ func expandDataset(input []interface{}) *views.ReportConfigDataset { attrs := input[0].(map[string]interface{}) dataset := &views.ReportConfigDataset{ - Granularity: utils.ToPtr(views.ReportGranularityType(attrs["granularity"].(string))), + Granularity: pointer.To(views.ReportGranularityType(attrs["granularity"].(string))), } if aggregation := attrs["aggregation"].(*pluginsdk.Set).List(); len(aggregation) > 0 { @@ -418,7 +418,7 @@ func expandSorting(input []interface{}) *[]views.ReportConfigSorting { for _, item := range input { v := item.(map[string]interface{}) outputSorting = append(outputSorting, views.ReportConfigSorting{ - Direction: utils.ToPtr(views.ReportConfigSortingType(v["direction"].(string))), + Direction: pointer.To(views.ReportConfigSortingType(v["direction"].(string))), Name: v["name"].(string), }) } @@ -435,7 +435,7 @@ func expandKpis(input []interface{}) *[]views.KpiProperties { for _, item := range input { v := item.(map[string]interface{}) outputKpis = append(outputKpis, views.KpiProperties{ - Type: utils.ToPtr(views.KpiTypeType(v["type"].(string))), + Type: pointer.To(views.KpiTypeType(v["type"].(string))), Enabled: utils.Bool(true), }) } @@ -452,7 +452,7 @@ func expandPivots(input 
[]interface{}) *[]views.PivotProperties { for _, item := range input { v := item.(map[string]interface{}) outputPivots = append(outputPivots, views.PivotProperties{ - Type: utils.ToPtr(views.PivotTypeType(v["type"].(string))), + Type: pointer.To(views.PivotTypeType(v["type"].(string))), Name: utils.String((v["name"].(string))), }) } diff --git a/internal/services/customproviders/custom_provider_resource.go b/internal/services/customproviders/custom_provider_resource.go index 28e3a2e313e6..0bf7a3d42161 100644 --- a/internal/services/customproviders/custom_provider_resource.go +++ b/internal/services/customproviders/custom_provider_resource.go @@ -7,6 +7,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/tags" @@ -18,7 +19,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) func resourceCustomProvider() *pluginsdk.Resource { @@ -233,7 +233,7 @@ func expandCustomProviderResourceType(input []interface{}) *[]customresourceprov attrs := v.(map[string]interface{}) definitions = append(definitions, customresourceprovider.CustomRPResourceTypeRouteDefinition{ - RoutingType: utils.ToPtr(customresourceprovider.ResourceTypeRouting(attrs["routing_type"].(string))), + RoutingType: pointer.To(customresourceprovider.ResourceTypeRouting(attrs["routing_type"].(string))), Name: attrs["name"].(string), Endpoint: attrs["endpoint"].(string), }) diff --git a/internal/services/databoxedge/databox_edge_device_resource.go b/internal/services/databoxedge/databox_edge_device_resource.go index efe496cb1c3d..537b93a6e4a3 100644 --- a/internal/services/databoxedge/databox_edge_device_resource.go +++ b/internal/services/databoxedge/databox_edge_device_resource.go @@ -9,6 +9,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/location" @@ -19,7 +20,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/services/databoxedge/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/services/databoxedge/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type DevicePropertiesModel struct { @@ -299,8 +299,8 @@ func expandDeviceSku(input string) *devices.Sku { } return &devices.Sku{ - Name: utils.ToPtr(devices.SkuName(v.Name)), - Tier: utils.ToPtr(devices.SkuTier(v.Tier)), + Name: pointer.To(devices.SkuName(v.Name)), + Tier: pointer.To(devices.SkuTier(v.Tier)), } } diff --git a/internal/services/hdinsight/hdinsight_hadoop_cluster_resource.go b/internal/services/hdinsight/hdinsight_hadoop_cluster_resource.go index 40fe20549004..a750e2bed508 100644 --- a/internal/services/hdinsight/hdinsight_hadoop_cluster_resource.go +++ b/internal/services/hdinsight/hdinsight_hadoop_cluster_resource.go @@ -11,6 +11,7 @@ import ( "time" "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" // nolint: staticcheck + "github.com/hashicorp/go-azure-helpers/lang/pointer" 
"github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/azure" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" @@ -29,10 +30,10 @@ import ( var hdInsightHadoopClusterHeadNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 2, - MaxInstanceCount: utils.Int(2), + MaxInstanceCount: pointer.To(2), CanSpecifyDisks: false, - FixedMinInstanceCount: utils.Int32(int32(1)), - FixedTargetInstanceCount: utils.Int32(int32(2)), + FixedMinInstanceCount: pointer.To(int32(1)), + FixedTargetInstanceCount: pointer.To(int32(2)), } var hdInsightHadoopClusterWorkerNodeDefinition = HDInsightNodeDefinition{ @@ -46,10 +47,10 @@ var hdInsightHadoopClusterWorkerNodeDefinition = HDInsightNodeDefinition{ var hdInsightHadoopClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 3, - MaxInstanceCount: utils.Int(3), + MaxInstanceCount: pointer.To(3), CanSpecifyDisks: false, - FixedMinInstanceCount: utils.Int32(int32(1)), - FixedTargetInstanceCount: utils.Int32(int32(3)), + FixedMinInstanceCount: pointer.To(int32(1)), + FixedTargetInstanceCount: pointer.To(int32(3)), } func resourceHDInsightHadoopCluster() *pluginsdk.Resource { @@ -640,7 +641,7 @@ func expandHDInsightApplicationEdgeNodeHttpsEndpoints(input []interface{}) *[]hd endPoint := hdinsight.ApplicationGetHTTPSEndpoint{ AccessModes: &accessModes, - DestinationPort: utils.Int32(destinationPort), + DestinationPort: pointer.To(destinationPort), PrivateIPAddress: utils.String(privateIpAddress), SubDomainSuffix: utils.String(subDomainSuffix), DisableGatewayAuth: utils.Bool(disableGatewayAuth), diff --git a/internal/services/hdinsight/hdinsight_hbase_cluster_resource.go b/internal/services/hdinsight/hdinsight_hbase_cluster_resource.go index 1860bfb5d609..55a728f44642 100644 --- a/internal/services/hdinsight/hdinsight_hbase_cluster_resource.go +++ b/internal/services/hdinsight/hdinsight_hbase_cluster_resource.go @@ -10,6 +10,7 @@ import ( "time" "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" // nolint: staticcheck + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/azure" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" @@ -26,9 +27,9 @@ import ( var hdInsightHBaseClusterHeadNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 2, - MaxInstanceCount: utils.Int(2), + MaxInstanceCount: pointer.To(2), CanSpecifyDisks: false, - FixedTargetInstanceCount: utils.Int32(int32(2)), + FixedTargetInstanceCount: pointer.To(int32(2)), } var hdInsightHBaseClusterWorkerNodeDefinition = HDInsightNodeDefinition{ @@ -41,9 +42,9 @@ var hdInsightHBaseClusterWorkerNodeDefinition = HDInsightNodeDefinition{ var hdInsightHBaseClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 3, - MaxInstanceCount: utils.Int(3), + MaxInstanceCount: pointer.To(3), CanSpecifyDisks: false, - FixedTargetInstanceCount: utils.Int32(int32(3)), + FixedTargetInstanceCount: pointer.To(int32(3)), } func resourceHDInsightHBaseCluster() *pluginsdk.Resource { diff --git a/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource.go b/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource.go index 62a21e5a01a1..82643ed69194 100644 --- 
a/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource.go +++ b/internal/services/hdinsight/hdinsight_interactive_query_cluster_resource.go @@ -10,6 +10,7 @@ import ( "time" "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" // nolint: staticcheck + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/azure" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" @@ -27,9 +28,9 @@ import ( var hdInsightInteractiveQueryClusterHeadNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 2, - MaxInstanceCount: utils.Int(2), + MaxInstanceCount: pointer.To(2), CanSpecifyDisks: false, - FixedTargetInstanceCount: utils.Int32(int32(2)), + FixedTargetInstanceCount: pointer.To(int32(2)), } var hdInsightInteractiveQueryClusterWorkerNodeDefinition = HDInsightNodeDefinition{ @@ -43,9 +44,9 @@ var hdInsightInteractiveQueryClusterWorkerNodeDefinition = HDInsightNodeDefiniti var hdInsightInteractiveQueryClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 3, - MaxInstanceCount: utils.Int(3), + MaxInstanceCount: pointer.To(3), CanSpecifyDisks: false, - FixedTargetInstanceCount: utils.Int32(int32(3)), + FixedTargetInstanceCount: pointer.To(int32(3)), } func resourceHDInsightInteractiveQueryCluster() *pluginsdk.Resource { diff --git a/internal/services/hdinsight/hdinsight_kafka_cluster_resource.go b/internal/services/hdinsight/hdinsight_kafka_cluster_resource.go index b34afd29e883..8e36daad4842 100644 --- a/internal/services/hdinsight/hdinsight_kafka_cluster_resource.go +++ b/internal/services/hdinsight/hdinsight_kafka_cluster_resource.go @@ -10,6 +10,7 @@ import ( "time" "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" // nolint: staticcheck + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/azure" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" @@ -28,31 +29,31 @@ import ( var hdInsightKafkaClusterHeadNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 2, - MaxInstanceCount: utils.Int(2), + MaxInstanceCount: pointer.To(2), CanSpecifyDisks: false, - FixedTargetInstanceCount: utils.Int32(int32(2)), + FixedTargetInstanceCount: pointer.To(int32(2)), } var hdInsightKafkaClusterWorkerNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: true, MinInstanceCount: 1, CanSpecifyDisks: true, - MaxNumberOfDisksPerNode: utils.Int(8), + MaxNumberOfDisksPerNode: pointer.To(8), } var hdInsightKafkaClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 3, - MaxInstanceCount: utils.Int(3), + MaxInstanceCount: pointer.To(3), CanSpecifyDisks: false, - FixedTargetInstanceCount: utils.Int32(int32(3)), + FixedTargetInstanceCount: pointer.To(int32(3)), } var hdInsightKafkaClusterKafkaManagementNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 2, CanSpecifyDisks: false, - FixedTargetInstanceCount: utils.Int32(int32(2)), + FixedTargetInstanceCount: pointer.To(int32(2)), } func resourceHDInsightKafkaCluster() *pluginsdk.Resource { diff --git a/internal/services/hdinsight/hdinsight_spark_cluster_resource.go 
b/internal/services/hdinsight/hdinsight_spark_cluster_resource.go index 2f6b18353b13..3c06cc24934e 100644 --- a/internal/services/hdinsight/hdinsight_spark_cluster_resource.go +++ b/internal/services/hdinsight/hdinsight_spark_cluster_resource.go @@ -10,6 +10,7 @@ import ( "time" "github.com/Azure/azure-sdk-for-go/services/hdinsight/mgmt/2018-06-01/hdinsight" // nolint: staticcheck + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/terraform-provider-azurerm/helpers/azure" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" @@ -26,9 +27,9 @@ import ( var hdInsightSparkClusterHeadNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 2, - MaxInstanceCount: utils.Int(2), + MaxInstanceCount: pointer.To(2), CanSpecifyDisks: false, - FixedTargetInstanceCount: utils.Int32(int32(2)), + FixedTargetInstanceCount: pointer.To(int32(2)), } var hdInsightSparkClusterWorkerNodeDefinition = HDInsightNodeDefinition{ @@ -42,8 +43,8 @@ var hdInsightSparkClusterWorkerNodeDefinition = HDInsightNodeDefinition{ var hdInsightSparkClusterZookeeperNodeDefinition = HDInsightNodeDefinition{ CanSpecifyInstanceCount: false, MinInstanceCount: 3, - MaxInstanceCount: utils.Int(3), - FixedTargetInstanceCount: utils.Int32(int32(3)), + MaxInstanceCount: pointer.To(3), + FixedTargetInstanceCount: pointer.To(int32(3)), CanSpecifyDisks: false, } diff --git a/internal/services/machinelearning/machine_learning_compute_cluster_resource.go b/internal/services/machinelearning/machine_learning_compute_cluster_resource.go index d481bf9c526d..42e4f104ddbd 100644 --- a/internal/services/machinelearning/machine_learning_compute_cluster_resource.go +++ b/internal/services/machinelearning/machine_learning_compute_cluster_resource.go @@ -197,9 +197,9 @@ func resourceComputeClusterCreate(d *pluginsdk.ResourceData, meta interface{}) e EnableNodePublicIP: pointer.To(d.Get("node_public_ip_enabled").(bool)), } - computeClusterAmlComputeProperties.RemoteLoginPortPublicAccess = utils.ToPtr(machinelearningcomputes.RemoteLoginPortPublicAccessDisabled) + computeClusterAmlComputeProperties.RemoteLoginPortPublicAccess = pointer.To(machinelearningcomputes.RemoteLoginPortPublicAccessDisabled) if d.Get("ssh_public_access_enabled").(bool) { - computeClusterAmlComputeProperties.RemoteLoginPortPublicAccess = utils.ToPtr(machinelearningcomputes.RemoteLoginPortPublicAccessEnabled) + computeClusterAmlComputeProperties.RemoteLoginPortPublicAccess = pointer.To(machinelearningcomputes.RemoteLoginPortPublicAccessEnabled) } if subnetId, ok := d.GetOk("subnet_resource_id"); ok && subnetId.(string) != "" { @@ -231,7 +231,7 @@ func resourceComputeClusterCreate(d *pluginsdk.ResourceData, meta interface{}) e Tags: tags.Expand(d.Get("tags").(map[string]interface{})), Sku: &machinelearningcomputes.Sku{ Name: workspace.Model.Sku.Name, - Tier: utils.ToPtr(machinelearningcomputes.SkuTier(*workspace.Model.Sku.Tier)), + Tier: pointer.To(machinelearningcomputes.SkuTier(*workspace.Model.Sku.Tier)), }, } @@ -329,7 +329,7 @@ func resourceComputeClusterDelete(d *pluginsdk.ResourceData, meta interface{}) e } future, err := client.ComputeDelete(ctx, *id, machinelearningcomputes.ComputeDeleteOperationOptions{ - UnderlyingResourceAction: utils.ToPtr(machinelearningcomputes.UnderlyingResourceActionDelete), + UnderlyingResourceAction: pointer.To(machinelearningcomputes.UnderlyingResourceActionDelete), }) if err != nil { return fmt.Errorf("deleting %s: 
%+v", *id, err) diff --git a/internal/services/machinelearning/machine_learning_compute_instance_resource.go b/internal/services/machinelearning/machine_learning_compute_instance_resource.go index c14436394e14..7475adcb593a 100644 --- a/internal/services/machinelearning/machine_learning_compute_instance_resource.go +++ b/internal/services/machinelearning/machine_learning_compute_instance_resource.go @@ -212,7 +212,7 @@ func resourceComputeInstanceCreate(d *pluginsdk.ResourceData, meta interface{}) } authType := d.Get("authorization_type").(string) if authType != "" { - computeInstance.Properties.ComputeInstanceAuthorizationType = utils.ToPtr(machinelearningcomputes.ComputeInstanceAuthorizationType(authType)) + computeInstance.Properties.ComputeInstanceAuthorizationType = pointer.To(machinelearningcomputes.ComputeInstanceAuthorizationType(authType)) } parameters := machinelearningcomputes.ComputeResource{ @@ -306,7 +306,7 @@ func resourceComputeInstanceDelete(d *pluginsdk.ResourceData, meta interface{}) } future, err := client.ComputeDelete(ctx, *id, machinelearningcomputes.ComputeDeleteOperationOptions{ - UnderlyingResourceAction: utils.ToPtr(machinelearningcomputes.UnderlyingResourceActionDelete), + UnderlyingResourceAction: pointer.To(machinelearningcomputes.UnderlyingResourceActionDelete), }) if err != nil { return fmt.Errorf("deleting Machine Learning Compute (%q): %+v", id, err) @@ -334,12 +334,12 @@ func expandComputePersonalComputeInstanceSetting(input []interface{}) *machinele func expandComputeSSHSetting(input []interface{}) *machinelearningcomputes.ComputeInstanceSshSettings { if len(input) == 0 { return &machinelearningcomputes.ComputeInstanceSshSettings{ - SshPublicAccess: utils.ToPtr(machinelearningcomputes.SshPublicAccessDisabled), + SshPublicAccess: pointer.To(machinelearningcomputes.SshPublicAccessDisabled), } } value := input[0].(map[string]interface{}) return &machinelearningcomputes.ComputeInstanceSshSettings{ - SshPublicAccess: utils.ToPtr(machinelearningcomputes.SshPublicAccessEnabled), + SshPublicAccess: pointer.To(machinelearningcomputes.SshPublicAccessEnabled), AdminPublicKey: utils.String(value["public_key"].(string)), } } diff --git a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go index e64c2729071f..c5cecb6fc9a9 100644 --- a/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_blobstorage_resource.go @@ -8,6 +8,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" @@ -19,7 +20,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/services/machinelearning/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type MachineLearningDataStoreBlobStorage struct{} @@ -156,8 +156,8 @@ func (r MachineLearningDataStoreBlobStorage) Create() sdk.ResourceFunc { } datastoreRaw := datastore.DatastoreResource{ - Name: utils.String(model.Name), - Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), + Name: pointer.To(model.Name), + Type: 
pointer.To(string(datastore.DatastoreTypeAzureBlob)), } storageDomainSuffix, ok := metadata.Client.Account.Environment.Storage.DomainSuffix() @@ -166,13 +166,13 @@ func (r MachineLearningDataStoreBlobStorage) Create() sdk.ResourceFunc { } props := &datastore.AzureBlobDatastore{ - AccountName: utils.String(containerId.StorageAccountName), + AccountName: pointer.To(containerId.StorageAccountName), Endpoint: storageDomainSuffix, - ContainerName: utils.String(containerId.ContainerName), - Description: utils.String(model.Description), - ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(model.ServiceDataAuthIdentity)), - IsDefault: utils.Bool(model.IsDefault), - Tags: utils.ToPtr(model.Tags), + ContainerName: pointer.To(containerId.ContainerName), + Description: pointer.To(model.Description), + ServiceDataAccessAuthIdentity: pointer.To(datastore.ServiceDataAccessAuthIdentity(model.ServiceDataAuthIdentity)), + IsDefault: pointer.To(model.IsDefault), + Tags: pointer.To(model.Tags), } accountKey := model.AccountKey @@ -231,17 +231,17 @@ func (r MachineLearningDataStoreBlobStorage) Update() sdk.ResourceFunc { } datastoreRaw := datastore.DatastoreResource{ - Name: utils.String(id.DataStoreName), - Type: utils.ToPtr(string(datastore.DatastoreTypeAzureBlob)), + Name: pointer.To(id.DataStoreName), + Type: pointer.To(string(datastore.DatastoreTypeAzureBlob)), } props := &datastore.AzureBlobDatastore{ - AccountName: utils.String(containerId.StorageAccountName), - ContainerName: utils.String(containerId.ContainerName), - Description: utils.String(state.Description), - ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(state.ServiceDataAuthIdentity)), - IsDefault: utils.Bool(state.IsDefault), - Tags: utils.ToPtr(state.Tags), + AccountName: pointer.To(containerId.StorageAccountName), + ContainerName: pointer.To(containerId.ContainerName), + Description: pointer.To(state.Description), + ServiceDataAccessAuthIdentity: pointer.To(datastore.ServiceDataAccessAuthIdentity(state.ServiceDataAuthIdentity)), + IsDefault: pointer.To(state.IsDefault), + Tags: pointer.To(state.Tags), } accountKey := state.AccountKey diff --git a/internal/services/machinelearning/machine_learning_datastore_datalake_gen2_resource.go b/internal/services/machinelearning/machine_learning_datastore_datalake_gen2_resource.go index eceb7ed3a240..c7c327ae9e0f 100644 --- a/internal/services/machinelearning/machine_learning_datastore_datalake_gen2_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_datalake_gen2_resource.go @@ -9,6 +9,7 @@ import ( "strings" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" @@ -172,15 +173,15 @@ func (r MachineLearningDataStoreDataLakeGen2) Create() sdk.ResourceFunc { datastoreRaw := datastore.DatastoreResource{ Name: utils.String(model.Name), - Type: utils.ToPtr(string(datastore.DatastoreTypeAzureDataLakeGenTwo)), + Type: pointer.To(string(datastore.DatastoreTypeAzureDataLakeGenTwo)), } props := &datastore.AzureDataLakeGen2Datastore{ AccountName: containerId.StorageAccountName, Filesystem: containerId.ContainerName, Description: utils.String(model.Description), - ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(model.ServiceDataIdentity)), - Tags: utils.ToPtr(model.Tags), + 
ServiceDataAccessAuthIdentity: pointer.To(datastore.ServiceDataAccessAuthIdentity(model.ServiceDataIdentity)), + Tags: pointer.To(model.Tags), } creds := map[string]interface{}{ @@ -236,15 +237,15 @@ func (r MachineLearningDataStoreDataLakeGen2) Update() sdk.ResourceFunc { datastoreRaw := datastore.DatastoreResource{ Name: utils.String(id.DataStoreName), - Type: utils.ToPtr(string(datastore.DatastoreTypeAzureDataLakeGenTwo)), + Type: pointer.To(string(datastore.DatastoreTypeAzureDataLakeGenTwo)), } props := &datastore.AzureDataLakeGen2Datastore{ AccountName: containerId.StorageAccountName, Filesystem: containerId.ContainerName, Description: utils.String(state.Description), - ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(state.ServiceDataIdentity)), - Tags: utils.ToPtr(state.Tags), + ServiceDataAccessAuthIdentity: pointer.To(datastore.ServiceDataAccessAuthIdentity(state.ServiceDataIdentity)), + Tags: pointer.To(state.Tags), } creds := map[string]interface{}{ diff --git a/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go index f646f0b65b12..f4be907c8985 100644 --- a/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go +++ b/internal/services/machinelearning/machine_learning_datastore_fileshare_resource.go @@ -8,6 +8,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/machinelearningservices/2023-04-01/datastore" @@ -19,7 +20,6 @@ import ( storageparse "github.com/hashicorp/terraform-provider-azurerm/internal/services/storage/parse" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type MachineLearningDataStoreFileShare struct{} @@ -155,16 +155,16 @@ func (r MachineLearningDataStoreFileShare) Create() sdk.ResourceFunc { } datastoreRaw := datastore.DatastoreResource{ - Name: utils.String(model.Name), - Type: utils.ToPtr(string(datastore.DatastoreTypeAzureFile)), + Name: pointer.To(model.Name), + Type: pointer.To(string(datastore.DatastoreTypeAzureFile)), } props := &datastore.AzureFileDatastore{ AccountName: fileShareId.StorageAccountName, FileShareName: fileShareId.FileshareName, - Description: utils.String(model.Description), - ServiceDataAccessAuthIdentity: utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(model.ServiceDataIdentity)), - Tags: utils.ToPtr(model.Tags), + Description: pointer.To(model.Description), + ServiceDataAccessAuthIdentity: pointer.To(datastore.ServiceDataAccessAuthIdentity(model.ServiceDataIdentity)), + Tags: pointer.To(model.Tags), } accountKey := model.AccountKey @@ -223,16 +223,16 @@ func (r MachineLearningDataStoreFileShare) Update() sdk.ResourceFunc { } datastoreRaw := datastore.DatastoreResource{ - Name: utils.String(id.DataStoreName), - Type: utils.ToPtr(string(datastore.DatastoreTypeAzureFile)), + Name: pointer.To(id.DataStoreName), + Type: pointer.To(string(datastore.DatastoreTypeAzureFile)), } props := &datastore.AzureFileDatastore{ AccountName: fileShareId.StorageAccountName, FileShareName: fileShareId.FileshareName, - Description: utils.String(state.Description), - ServiceDataAccessAuthIdentity: 
utils.ToPtr(datastore.ServiceDataAccessAuthIdentity(state.ServiceDataIdentity)), - Tags: utils.ToPtr(state.Tags), + Description: pointer.To(state.Description), + ServiceDataAccessAuthIdentity: pointer.To(datastore.ServiceDataAccessAuthIdentity(state.ServiceDataIdentity)), + Tags: pointer.To(state.Tags), } accountKey := state.AccountKey diff --git a/internal/services/machinelearning/machine_learning_inference_cluster_resource.go b/internal/services/machinelearning/machine_learning_inference_cluster_resource.go index 4d455e36ae85..7258508e339f 100644 --- a/internal/services/machinelearning/machine_learning_inference_cluster_resource.go +++ b/internal/services/machinelearning/machine_learning_inference_cluster_resource.go @@ -275,7 +275,7 @@ func resourceAksInferenceClusterDelete(d *pluginsdk.ResourceData, meta interface } future, err := client.ComputeDelete(ctx, *id, machinelearningcomputes.ComputeDeleteOperationOptions{ - UnderlyingResourceAction: utils.ToPtr(machinelearningcomputes.UnderlyingResourceActionDetach), + UnderlyingResourceAction: pointer.To(machinelearningcomputes.UnderlyingResourceActionDetach), }) if err != nil { return fmt.Errorf("deleting Inference Cluster %q in workspace %q (Resource Group %q): %+v", @@ -298,7 +298,7 @@ func expandAksComputeProperties(aksId string, aks *managedclusters.ManagedCluste Properties: &machinelearningcomputes.AKSSchemaProperties{ ClusterFqdn: utils.String(*fqdn), SslConfiguration: expandSSLConfig(d.Get("ssl").([]interface{})), - ClusterPurpose: utils.ToPtr(machinelearningcomputes.ClusterPurpose(d.Get("cluster_purpose").(string))), + ClusterPurpose: pointer.To(machinelearningcomputes.ClusterPurpose(d.Get("cluster_purpose").(string))), }, ComputeLocation: utils.String(aks.Location), Description: utils.String(d.Get("description").(string)), @@ -326,7 +326,7 @@ func expandSSLConfig(input []interface{}) *machinelearningcomputes.SslConfigurat } return &machinelearningcomputes.SslConfiguration{ - Status: utils.ToPtr(machinelearningcomputes.SslConfigStatus(sslStatus)), + Status: pointer.To(machinelearningcomputes.SslConfigStatus(sslStatus)), Cert: utils.String(v["cert"].(string)), Key: utils.String(v["key"].(string)), Cname: utils.String(v["cname"].(string)), diff --git a/internal/services/machinelearning/machine_learning_synapse_spark_resource.go b/internal/services/machinelearning/machine_learning_synapse_spark_resource.go index a7a87a4517a9..9e62cb250aa9 100644 --- a/internal/services/machinelearning/machine_learning_synapse_spark_resource.go +++ b/internal/services/machinelearning/machine_learning_synapse_spark_resource.go @@ -9,6 +9,7 @@ import ( "regexp" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/location" @@ -197,7 +198,7 @@ func resourceSynapseSparkDelete(d *pluginsdk.ResourceData, meta interface{}) err } future, err := client.ComputeDelete(ctx, *id, machinelearningcomputes.ComputeDeleteOperationOptions{ - UnderlyingResourceAction: utils.ToPtr(machinelearningcomputes.UnderlyingResourceActionDetach), + UnderlyingResourceAction: pointer.To(machinelearningcomputes.UnderlyingResourceActionDetach), }) if err != nil { return fmt.Errorf("deleting Machine Learning Compute (%q): %+v", id, err) diff --git a/internal/services/machinelearning/machine_learning_workspace_resource.go b/internal/services/machinelearning/machine_learning_workspace_resource.go index 
bdd872647fbb..83023089306e 100644 --- a/internal/services/machinelearning/machine_learning_workspace_resource.go +++ b/internal/services/machinelearning/machine_learning_workspace_resource.go @@ -7,6 +7,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" @@ -241,39 +242,39 @@ func resourceMachineLearningWorkspaceCreateOrUpdate(d *pluginsdk.ResourceData, m } workspace := workspaces.Workspace{ - Name: utils.String(id.WorkspaceName), - Location: utils.String(azure.NormalizeLocation(d.Get("location").(string))), + Name: pointer.To(id.WorkspaceName), + Location: pointer.To(azure.NormalizeLocation(d.Get("location").(string))), Tags: tags.Expand(d.Get("tags").(map[string]interface{})), Sku: &workspaces.Sku{ Name: d.Get("sku_name").(string), - Tier: utils.ToPtr(workspaces.SkuTier(d.Get("sku_name").(string))), + Tier: pointer.To(workspaces.SkuTier(d.Get("sku_name").(string))), }, Identity: expandedIdentity, Properties: &workspaces.WorkspaceProperties{ - V1LegacyMode: utils.ToPtr(d.Get("v1_legacy_mode_enabled").(bool)), + V1LegacyMode: pointer.To(d.Get("v1_legacy_mode_enabled").(bool)), Encryption: expandedEncryption, - StorageAccount: utils.String(d.Get("storage_account_id").(string)), - ApplicationInsights: utils.String(d.Get("application_insights_id").(string)), - KeyVault: utils.String(d.Get("key_vault_id").(string)), - PublicNetworkAccess: utils.ToPtr(workspaces.PublicNetworkAccessDisabled), + StorageAccount: pointer.To(d.Get("storage_account_id").(string)), + ApplicationInsights: pointer.To(d.Get("application_insights_id").(string)), + KeyVault: pointer.To(d.Get("key_vault_id").(string)), + PublicNetworkAccess: pointer.To(workspaces.PublicNetworkAccessDisabled), }, } if networkAccessBehindVnetEnabled { - workspace.Properties.PublicNetworkAccess = utils.ToPtr(workspaces.PublicNetworkAccessEnabled) + workspace.Properties.PublicNetworkAccess = pointer.To(workspaces.PublicNetworkAccessEnabled) } if v, ok := d.GetOk("description"); ok { - workspace.Properties.Description = utils.String(v.(string)) + workspace.Properties.Description = pointer.To(v.(string)) } if v, ok := d.GetOk("friendly_name"); ok { - workspace.Properties.FriendlyName = utils.String(v.(string)) + workspace.Properties.FriendlyName = pointer.To(v.(string)) } if v, ok := d.GetOk("container_registry_id"); ok { - workspace.Properties.ContainerRegistry = utils.String(v.(string)) + workspace.Properties.ContainerRegistry = pointer.To(v.(string)) } if v, ok := d.GetOk("high_business_impact"); ok { @@ -281,11 +282,11 @@ func resourceMachineLearningWorkspaceCreateOrUpdate(d *pluginsdk.ResourceData, m } if v, ok := d.GetOk("image_build_compute_name"); ok { - workspace.Properties.ImageBuildCompute = utils.String(v.(string)) + workspace.Properties.ImageBuildCompute = pointer.To(v.(string)) } if v, ok := d.GetOk("primary_user_assigned_identity"); ok { - workspace.Properties.PrimaryUserAssignedIdentity = utils.String(v.(string)) + workspace.Properties.PrimaryUserAssignedIdentity = pointer.To(v.(string)) } future, err := client.CreateOrUpdate(ctx, id, workspace) @@ -470,7 +471,7 @@ func expandMachineLearningWorkspaceEncryption(input []interface{}) *workspaces.E } if raw["user_assigned_identity_id"].(string) != "" { - out.Identity.UserAssignedIdentity = utils.String(raw["user_assigned_identity_id"].(string)) + 
out.Identity.UserAssignedIdentity = pointer.To(raw["user_assigned_identity_id"].(string)) } return &out diff --git a/internal/services/springcloud/spring_cloud_application_insights_application_performance_monitoring_resource.go b/internal/services/springcloud/spring_cloud_application_insights_application_performance_monitoring_resource.go index b7cf9877230d..ca96c0e8914b 100644 --- a/internal/services/springcloud/spring_cloud_application_insights_application_performance_monitoring_resource.go +++ b/internal/services/springcloud/spring_cloud_application_insights_application_performance_monitoring_resource.go @@ -6,6 +6,7 @@ import ( "strconv" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-sdk/resource-manager/appplatform/2023-09-01-preview/appplatform" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -13,7 +14,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/services/springcloud/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type SpringCloudApplicationInsightsApplicationPerformanceMonitoringModel struct { @@ -125,13 +125,13 @@ func (s SpringCloudApplicationInsightsApplicationPerformanceMonitoringResource) resource := appplatform.ApmResource{ Properties: &appplatform.ApmProperties{ Type: "ApplicationInsights", - Properties: utils.ToPtr(map[string]string{ + Properties: pointer.To(map[string]string{ "role_name": model.RoleName, "role_instance": model.RoleInstance, "sampling_requests_per_second": fmt.Sprintf("%d", model.SamplingRequestsPerSecond), "sampling_percentage": fmt.Sprintf("%d", model.SamplingPercentage), }), - Secrets: utils.ToPtr(map[string]string{ + Secrets: pointer.To(map[string]string{ "connection_string": model.ConnectionString, }), }, diff --git a/internal/services/springcloud/spring_cloud_service_resource.go b/internal/services/springcloud/spring_cloud_service_resource.go index efdba12579d4..10800bd4f65c 100644 --- a/internal/services/springcloud/spring_cloud_service_resource.go +++ b/internal/services/springcloud/spring_cloud_service_resource.go @@ -10,6 +10,7 @@ import ( "strings" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonids" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" @@ -511,7 +512,7 @@ func resourceSpringCloudServiceCreate(d *pluginsdk.ResourceData, meta interface{ return fmt.Errorf("applying container registries for %s: %+v", id, err) } buildResource := appplatform.BuildService{ - Properties: utils.ToPtr(expandSpringCloudBuildService(d.Get("default_build_service").([]interface{}), id)), + Properties: pointer.To(expandSpringCloudBuildService(d.Get("default_build_service").([]interface{}), id)), } buildServiceCreateFuture, err := buildServiceClient.CreateOrUpdate(ctx, id.ResourceGroup, id.SpringName, "default", buildResource) if err != nil { @@ -639,7 +640,7 @@ func resourceSpringCloudServiceUpdate(d *pluginsdk.ResourceData, meta interface{ return fmt.Errorf("applying container registries for %s: %+v", id, err) } buildResource := appplatform.BuildService{ - Properties: utils.ToPtr(expandSpringCloudBuildService(d.Get("default_build_service").([]interface{}), *id)), + Properties: 
pointer.To(expandSpringCloudBuildService(d.Get("default_build_service").([]interface{}), *id)), } buildServiceCreateFuture, err := buildServiceClient.CreateOrUpdate(ctx, id.ResourceGroup, id.SpringName, "default", buildResource) if err != nil { diff --git a/internal/services/streamanalytics/helpers_input.go b/internal/services/streamanalytics/helpers_input.go index 1f4f9c21ccb4..2310b82bd7e1 100644 --- a/internal/services/streamanalytics/helpers_input.go +++ b/internal/services/streamanalytics/helpers_input.go @@ -6,10 +6,10 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) func schemaStreamAnalyticsStreamInputSerialization() *pluginsdk.Schema { @@ -77,8 +77,8 @@ func expandStreamAnalyticsStreamInputSerialization(input []interface{}) (inputs. } return inputs.CsvSerialization{ Properties: &inputs.CsvSerializationProperties{ - Encoding: utils.ToPtr(inputs.Encoding(encoding)), - FieldDelimiter: utils.String(fieldDelimiter), + Encoding: pointer.To(inputs.Encoding(encoding)), + FieldDelimiter: pointer.To(fieldDelimiter), }, }, nil @@ -89,7 +89,7 @@ func expandStreamAnalyticsStreamInputSerialization(input []interface{}) (inputs. return inputs.JsonSerialization{ Properties: &inputs.JsonSerializationProperties{ - Encoding: utils.ToPtr(inputs.Encoding(encoding)), + Encoding: pointer.To(inputs.Encoding(encoding)), }, }, nil } @@ -121,8 +121,8 @@ func expandStreamAnalyticsStreamInputSerializationTyped(serialization []Serializ } return inputs.CsvSerialization{ Properties: &inputs.CsvSerializationProperties{ - Encoding: utils.ToPtr(inputs.Encoding(encoding)), - FieldDelimiter: utils.String(fieldDelimiter), + Encoding: pointer.To(inputs.Encoding(encoding)), + FieldDelimiter: pointer.To(fieldDelimiter), }, }, nil @@ -133,7 +133,7 @@ func expandStreamAnalyticsStreamInputSerializationTyped(serialization []Serializ return inputs.JsonSerialization{ Properties: &inputs.JsonSerializationProperties{ - Encoding: utils.ToPtr(inputs.Encoding(encoding)), + Encoding: pointer.To(inputs.Encoding(encoding)), }, }, nil } diff --git a/internal/services/streamanalytics/helpers_output.go b/internal/services/streamanalytics/helpers_output.go index d64899f044b6..bed7e88f1a90 100644 --- a/internal/services/streamanalytics/helpers_output.go +++ b/internal/services/streamanalytics/helpers_output.go @@ -6,10 +6,10 @@ package streamanalytics import ( "fmt" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2021-10-01-preview/outputs" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) func schemaStreamAnalyticsOutputSerialization() *pluginsdk.Schema { @@ -99,8 +99,8 @@ func expandStreamAnalyticsOutputSerialization(input []interface{}) (outputs.Seri } return outputs.CsvSerialization{ Properties: &outputs.CsvSerializationProperties{ - Encoding: utils.ToPtr(outputs.Encoding(encoding)), - FieldDelimiter: utils.String(fieldDelimiter), + Encoding: pointer.To(outputs.Encoding(encoding)), + FieldDelimiter: pointer.To(fieldDelimiter), }, }, nil @@ -117,8 +117,8 @@ func 
expandStreamAnalyticsOutputSerialization(input []interface{}) (outputs.Seri return outputs.JsonSerialization{ Properties: &outputs.JsonSerializationProperties{ - Encoding: utils.ToPtr(outputs.Encoding(encoding)), - Format: utils.ToPtr(outputs.JsonOutputSerializationFormat(format)), + Encoding: pointer.To(outputs.Encoding(encoding)), + Format: pointer.To(outputs.JsonOutputSerializationFormat(format)), }, }, nil diff --git a/internal/services/streamanalytics/stream_analytics_cluster_resource.go b/internal/services/streamanalytics/stream_analytics_cluster_resource.go index 6f603833ec89..ba2454ff89ef 100644 --- a/internal/services/streamanalytics/stream_analytics_cluster_resource.go +++ b/internal/services/streamanalytics/stream_analytics_cluster_resource.go @@ -8,6 +8,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/tags" @@ -16,7 +17,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/migration" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type ClusterResource struct{} @@ -99,11 +99,11 @@ func (r ClusterResource) Create() sdk.ResourceFunc { } props := clusters.Cluster{ - Name: utils.String(model.Name), - Location: utils.String(model.Location), + Name: pointer.To(model.Name), + Location: pointer.To(model.Location), Sku: &clusters.ClusterSku{ - Name: utils.ToPtr(clusters.ClusterSkuNameDefault), - Capacity: utils.ToPtr(model.StreamingCapacity), + Name: pointer.To(clusters.ClusterSkuNameDefault), + Capacity: pointer.To(model.StreamingCapacity), }, Tags: tags.Expand(model.Tags), } @@ -200,7 +200,7 @@ func (r ClusterResource) Update() sdk.ResourceFunc { if metadata.ResourceData.HasChange("streaming_capacity") || metadata.ResourceData.HasChange("tags") { props := clusters.Cluster{ Sku: &clusters.ClusterSku{ - Capacity: utils.ToPtr(state.StreamingCapacity), + Capacity: pointer.To(state.StreamingCapacity), }, Tags: tags.Expand(state.Tags), } diff --git a/internal/services/streamanalytics/stream_analytics_job_data_source.go b/internal/services/streamanalytics/stream_analytics_job_data_source.go index 5309c59a501f..9ed6ea5630c4 100644 --- a/internal/services/streamanalytics/stream_analytics_job_data_source.go +++ b/internal/services/streamanalytics/stream_analytics_job_data_source.go @@ -7,6 +7,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/location" @@ -14,7 +15,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/clients" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) func dataSourceStreamAnalyticsJob() *pluginsdk.Resource { @@ -108,7 +108,7 @@ func dataSourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{ id := streamingjobs.NewStreamingJobID(subscriptionId, d.Get("resource_group_name").(string), d.Get("name").(string)) opts := streamingjobs.GetOperationOptions{ - Expand: 
utils.ToPtr("transformation"), + Expand: pointer.To("transformation"), } resp, err := client.Get(ctx, id, opts) if err != nil { diff --git a/internal/services/streamanalytics/stream_analytics_job_resource.go b/internal/services/streamanalytics/stream_analytics_job_resource.go index a9c68d9f93f6..fbd90226dea9 100644 --- a/internal/services/streamanalytics/stream_analytics_job_resource.go +++ b/internal/services/streamanalytics/stream_analytics_job_resource.go @@ -8,6 +8,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-helpers/resourcemanager/identity" @@ -261,14 +262,14 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte Location: utils.String(azure.NormalizeLocation(d.Get("location").(string))), Properties: &streamingjobs.StreamingJobProperties{ Sku: &streamingjobs.Sku{ - Name: utils.ToPtr(streamingjobs.SkuNameStandard), + Name: pointer.To(streamingjobs.SkuNameStandard), }, - ContentStoragePolicy: utils.ToPtr(streamingjobs.ContentStoragePolicy(contentStoragePolicy)), - EventsLateArrivalMaxDelayInSeconds: utils.Int64(int64(d.Get("events_late_arrival_max_delay_in_seconds").(int))), - EventsOutOfOrderMaxDelayInSeconds: utils.Int64(int64(d.Get("events_out_of_order_max_delay_in_seconds").(int))), - EventsOutOfOrderPolicy: utils.ToPtr(streamingjobs.EventsOutOfOrderPolicy(d.Get("events_out_of_order_policy").(string))), - OutputErrorPolicy: utils.ToPtr(streamingjobs.OutputErrorPolicy(d.Get("output_error_policy").(string))), - JobType: utils.ToPtr(streamingjobs.JobType(jobType)), + ContentStoragePolicy: pointer.To(streamingjobs.ContentStoragePolicy(contentStoragePolicy)), + EventsLateArrivalMaxDelayInSeconds: pointer.To(int64(d.Get("events_late_arrival_max_delay_in_seconds").(int))), + EventsOutOfOrderMaxDelayInSeconds: pointer.To(int64(d.Get("events_out_of_order_max_delay_in_seconds").(int))), + EventsOutOfOrderPolicy: pointer.To(streamingjobs.EventsOutOfOrderPolicy(d.Get("events_out_of_order_policy").(string))), + OutputErrorPolicy: pointer.To(streamingjobs.OutputErrorPolicy(d.Get("output_error_policy").(string))), + JobType: pointer.To(streamingjobs.JobType(jobType)), }, Identity: expandedIdentity, Tags: tags.Expand(d.Get("tags").(map[string]interface{})), @@ -276,7 +277,7 @@ func resourceStreamAnalyticsJobCreateUpdate(d *pluginsdk.ResourceData, meta inte if _, ok := d.GetOk("compatibility_level"); ok { compatibilityLevel := d.Get("compatibility_level").(string) - props.Properties.CompatibilityLevel = utils.ToPtr(streamingjobs.CompatibilityLevel(compatibilityLevel)) + props.Properties.CompatibilityLevel = pointer.To(streamingjobs.CompatibilityLevel(compatibilityLevel)) } if contentStoragePolicy == string(streamingjobs.ContentStoragePolicyJobStorageAccount) { @@ -365,7 +366,7 @@ func resourceStreamAnalyticsJobRead(d *pluginsdk.ResourceData, meta interface{}) } opts := streamingjobs.GetOperationOptions{ - Expand: utils.ToPtr("transformation"), + Expand: pointer.To("transformation"), } resp, err := client.Get(ctx, *id, opts) if err != nil { @@ -546,7 +547,7 @@ func expandJobStorageAccount(input []interface{}) *streamingjobs.JobStorageAccou accountKey := v["account_key"].(string) return &streamingjobs.JobStorageAccount{ - AuthenticationMode: utils.ToPtr(streamingjobs.AuthenticationMode(authenticationMode)), + AuthenticationMode: 
pointer.To(streamingjobs.AuthenticationMode(authenticationMode)), AccountName: utils.String(accountName), AccountKey: utils.String(accountKey), } diff --git a/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go b/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go index ba115582812c..4bcc86b45e3c 100644 --- a/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go +++ b/internal/services/streamanalytics/stream_analytics_job_schedule_resource.go @@ -9,6 +9,7 @@ import ( "time" "github.com/Azure/go-autorest/autorest/date" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/terraform-provider-azurerm/helpers/validate" @@ -19,7 +20,6 @@ import ( streamAnalyticsValidate "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type JobScheduleResource struct{} @@ -117,7 +117,7 @@ func (r JobScheduleResource) Create() sdk.ResourceFunc { } props := &streamingjobs.StartStreamingJobParameters{ - OutputStartMode: utils.ToPtr(outputStartMode), + OutputStartMode: pointer.To(outputStartMode), } if outputStartMode == streamingjobs.OutputStartModeCustomTime { @@ -128,7 +128,7 @@ func (r JobScheduleResource) Create() sdk.ResourceFunc { outputStartTime := &date.Time{ Time: startTime, } - props.OutputStartTime = utils.String(outputStartTime.String()) + props.OutputStartTime = pointer.To(outputStartTime.String()) } } @@ -219,11 +219,11 @@ func (r JobScheduleResource) Update() sdk.ResourceFunc { } props := &streamingjobs.StartStreamingJobParameters{ - OutputStartMode: utils.ToPtr(outputStartMode), + OutputStartMode: pointer.To(outputStartMode), } if outputStartMode == streamingjobs.OutputStartModeCustomTime { - props.OutputStartTime = utils.String(outputStartTime.String()) + props.OutputStartTime = pointer.To(outputStartTime.String()) } var opts streamingjobs.GetOperationOptions diff --git a/internal/services/streamanalytics/stream_analytics_output_blob_resource.go b/internal/services/streamanalytics/stream_analytics_output_blob_resource.go index 1aebd1f3ec08..c58f80b85f4c 100644 --- a/internal/services/streamanalytics/stream_analytics_output_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_blob_resource.go @@ -8,6 +8,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2021-10-01-preview/outputs" @@ -158,21 +159,21 @@ func resourceStreamAnalyticsOutputBlobCreateUpdate(d *pluginsdk.ResourceData, me } props := outputs.Output{ - Name: utils.String(id.OutputName), + Name: pointer.To(id.OutputName), Properties: &outputs.OutputProperties{ Datasource: &outputs.BlobOutputDataSource{ Properties: &outputs.BlobOutputDataSourceProperties{ StorageAccounts: &[]outputs.StorageAccount{ { AccountKey: getStorageAccountKey(d.Get("storage_account_key").(string)), - AccountName: utils.String(storageAccountName), + AccountName: pointer.To(storageAccountName), }, }, - Container: utils.String(containerName), - 
DateFormat: utils.String(dateFormat), - PathPattern: utils.String(pathPattern), - TimeFormat: utils.String(timeFormat), - AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), + Container: pointer.To(containerName), + DateFormat: pointer.To(dateFormat), + PathPattern: pointer.To(pathPattern), + TimeFormat: pointer.To(timeFormat), + AuthenticationMode: pointer.To(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), }, }, Serialization: serialization, @@ -180,7 +181,7 @@ func resourceStreamAnalyticsOutputBlobCreateUpdate(d *pluginsdk.ResourceData, me } if batchMaxWaitTime, ok := d.GetOk("batch_max_wait_time"); ok { - props.Properties.TimeWindow = utils.String(batchMaxWaitTime.(string)) + props.Properties.TimeWindow = pointer.To(batchMaxWaitTime.(string)) } if batchMinRows, ok := d.GetOk("batch_min_rows"); ok { @@ -309,5 +310,5 @@ func getStorageAccountKey(input string) *string { return nil } - return utils.String(input) + return pointer.To(input) } diff --git a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go index 76da80305450..68cb1980b000 100644 --- a/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_eventhub_resource.go @@ -8,6 +8,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2021-10-01-preview/outputs" @@ -149,11 +150,11 @@ func resourceStreamAnalyticsOutputEventHubCreateUpdate(d *pluginsdk.ResourceData } eventHubOutputDataSourceProps := &outputs.EventHubOutputDataSourceProperties{ - PartitionKey: utils.String(partitionKey), + PartitionKey: pointer.To(partitionKey), PropertyColumns: utils.ExpandStringSlice(propertyColumns), - EventHubName: utils.String(eventHubName), - ServiceBusNamespace: utils.String(serviceBusNamespace), - AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), + EventHubName: pointer.To(eventHubName), + ServiceBusNamespace: pointer.To(serviceBusNamespace), + AuthenticationMode: pointer.To(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), } if sharedAccessPolicyKey != "" { @@ -165,7 +166,7 @@ func resourceStreamAnalyticsOutputEventHubCreateUpdate(d *pluginsdk.ResourceData } props := outputs.Output{ - Name: utils.String(id.OutputName), + Name: pointer.To(id.OutputName), Properties: &outputs.OutputProperties{ Datasource: &outputs.EventHubOutputDataSource{ Properties: eventHubOutputDataSourceProps, diff --git a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go index f0a8d891432c..dfd1254cc450 100644 --- a/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_mssql_resource.go @@ -8,6 +8,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2021-10-01-preview/outputs" "github.com/hashicorp/terraform-provider-azurerm/helpers/tf" @@ -16,7 +17,6 @@ import ( 
"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) func resourceStreamAnalyticsOutputSql() *pluginsdk.Resource { @@ -146,22 +146,22 @@ func resourceStreamAnalyticsOutputSqlCreateUpdate(d *pluginsdk.ResourceData, met } dataSourceProperties := outputs.AzureSqlDatabaseDataSourceProperties{ - Server: utils.String(d.Get("server").(string)), - Database: utils.String(d.Get("database").(string)), - Table: utils.String(d.Get("table").(string)), - MaxBatchCount: utils.Float(d.Get("max_batch_count").(float64)), - MaxWriterCount: utils.Float(d.Get("max_writer_count").(float64)), - AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), + Server: pointer.To(d.Get("server").(string)), + Database: pointer.To(d.Get("database").(string)), + Table: pointer.To(d.Get("table").(string)), + MaxBatchCount: pointer.To(d.Get("max_batch_count").(float64)), + MaxWriterCount: pointer.To(d.Get("max_writer_count").(float64)), + AuthenticationMode: pointer.To(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), } // Add user/password dataSourceProperties only if authentication mode requires them if *dataSourceProperties.AuthenticationMode == outputs.AuthenticationModeConnectionString { - dataSourceProperties.User = utils.String(d.Get("user").(string)) - dataSourceProperties.Password = utils.String(d.Get("password").(string)) + dataSourceProperties.User = pointer.To(d.Get("user").(string)) + dataSourceProperties.Password = pointer.To(d.Get("password").(string)) } props := outputs.Output{ - Name: utils.String(id.OutputName), + Name: pointer.To(id.OutputName), Properties: &outputs.OutputProperties{ Datasource: &outputs.AzureSqlDatabaseOutputDataSource{ Properties: &dataSourceProperties, diff --git a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go index 9aff1c6009a9..b6de5cd7775a 100644 --- a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go @@ -8,6 +8,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2021-10-01-preview/outputs" @@ -16,7 +17,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/migration" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type OutputPowerBIResource struct{} @@ -131,24 +131,24 @@ func (r OutputPowerBIResource) Create() sdk.ResourceFunc { } powerBIOutputProps := &outputs.PowerBIOutputDataSourceProperties{ - Dataset: utils.String(model.DataSet), - Table: utils.String(model.Table), - GroupId: utils.String(model.GroupID), - GroupName: utils.String(model.GroupName), - RefreshToken: utils.String("someRefreshToken"), // A valid refresh token is currently only obtainable via the Azure Portal. 
Put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. - AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode("Msi")), // Set authentication mode as "Msi" here since other modes requires params obtainable from portal only. + Dataset: pointer.To(model.DataSet), + Table: pointer.To(model.Table), + GroupId: pointer.To(model.GroupID), + GroupName: pointer.To(model.GroupName), + RefreshToken: pointer.To("someRefreshToken"), // A valid refresh token is currently only obtainable via the Azure Portal. Put a dummy string value here when creating the data source and then going to the Azure Portal to authenticate the data source which will update this property with a valid refresh token. + AuthenticationMode: pointer.To(outputs.AuthenticationMode("Msi")), // Set authentication mode as "Msi" here since other modes requires params obtainable from portal only. } if model.TokenUserDisplayName != "" { - powerBIOutputProps.TokenUserDisplayName = utils.String(model.TokenUserDisplayName) + powerBIOutputProps.TokenUserDisplayName = pointer.To(model.TokenUserDisplayName) } if model.TokenUserPrincipalName != "" { - powerBIOutputProps.TokenUserPrincipalName = utils.String(model.TokenUserPrincipalName) + powerBIOutputProps.TokenUserPrincipalName = pointer.To(model.TokenUserPrincipalName) } props := outputs.Output{ - Name: utils.String(model.Name), + Name: pointer.To(model.Name), Properties: &outputs.OutputProperties{ Datasource: &outputs.PowerBIOutputDataSource{ Properties: powerBIOutputProps, diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go index d103ef1b594c..47e0a8cdacf1 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_queue_resource.go @@ -8,6 +8,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2021-10-01-preview/outputs" @@ -153,21 +154,21 @@ func resourceStreamAnalyticsOutputServiceBusQueueCreateUpdate(d *pluginsdk.Resou systemPropertyColumns := d.Get("system_property_columns") dataSourceProperties := &outputs.ServiceBusQueueOutputDataSourceProperties{ - QueueName: utils.String(queueName), - ServiceBusNamespace: utils.String(serviceBusNamespace), + QueueName: pointer.To(queueName), + ServiceBusNamespace: pointer.To(serviceBusNamespace), PropertyColumns: utils.ExpandStringSlice(d.Get("property_columns").([]interface{})), SystemPropertyColumns: &systemPropertyColumns, - AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), + AuthenticationMode: pointer.To(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), } // Add shared access policy key/name only if required by authentication mode if *dataSourceProperties.AuthenticationMode == outputs.AuthenticationModeConnectionString { - dataSourceProperties.SharedAccessPolicyName = utils.String(sharedAccessPolicyName) - dataSourceProperties.SharedAccessPolicyKey = utils.String(sharedAccessPolicyKey) + dataSourceProperties.SharedAccessPolicyName = pointer.To(sharedAccessPolicyName) + 
dataSourceProperties.SharedAccessPolicyKey = pointer.To(sharedAccessPolicyKey) } props := outputs.Output{ - Name: utils.String(id.OutputName), + Name: pointer.To(id.OutputName), Properties: &outputs.OutputProperties{ Datasource: &outputs.ServiceBusQueueOutputDataSource{ Properties: dataSourceProperties, diff --git a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go index 41001924b761..a52f0151275a 100644 --- a/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go +++ b/internal/services/streamanalytics/stream_analytics_output_servicebus_topic_resource.go @@ -8,6 +8,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2021-10-01-preview/outputs" @@ -149,21 +150,21 @@ func resourceStreamAnalyticsOutputServiceBusTopicCreateUpdate(d *pluginsdk.Resou systemPropertyColumns := d.Get("system_property_columns").(map[string]interface{}) dataSourceProperties := &outputs.ServiceBusTopicOutputDataSourceProperties{ - TopicName: utils.String(d.Get("topic_name").(string)), - ServiceBusNamespace: utils.String(d.Get("servicebus_namespace").(string)), + TopicName: pointer.To(d.Get("topic_name").(string)), + ServiceBusNamespace: pointer.To(d.Get("servicebus_namespace").(string)), PropertyColumns: utils.ExpandStringSlice(d.Get("property_columns").([]interface{})), SystemPropertyColumns: expandSystemPropertyColumns(systemPropertyColumns), - AuthenticationMode: utils.ToPtr(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), + AuthenticationMode: pointer.To(outputs.AuthenticationMode(d.Get("authentication_mode").(string))), } // Add shared access policy key/name only if required by authentication mode if *dataSourceProperties.AuthenticationMode == outputs.AuthenticationModeConnectionString { - dataSourceProperties.SharedAccessPolicyKey = utils.String(d.Get("shared_access_policy_key").(string)) - dataSourceProperties.SharedAccessPolicyName = utils.String(d.Get("shared_access_policy_name").(string)) + dataSourceProperties.SharedAccessPolicyKey = pointer.To(d.Get("shared_access_policy_key").(string)) + dataSourceProperties.SharedAccessPolicyName = pointer.To(d.Get("shared_access_policy_name").(string)) } props := outputs.Output{ - Name: utils.String(id.OutputName), + Name: pointer.To(id.OutputName), Properties: &outputs.OutputProperties{ Datasource: &outputs.ServiceBusTopicOutputDataSource{ Properties: dataSourceProperties, diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go index 81db1b9a7b81..bece9a8d3df7 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go +++ b/internal/services/streamanalytics/stream_analytics_reference_input_blob_resource.go @@ -8,6 +8,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" @@ -17,7 +18,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" 
"github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) func resourceStreamAnalyticsReferenceInputBlob() *pluginsdk.Resource { @@ -141,21 +141,21 @@ func resourceStreamAnalyticsReferenceInputBlobCreate(d *pluginsdk.ResourceData, } props := inputs.Input{ - Name: utils.String(id.InputName), + Name: pointer.To(id.InputName), Properties: &inputs.ReferenceInputProperties{ Datasource: &inputs.BlobReferenceInputDataSource{ Properties: &inputs.BlobDataSourceProperties{ - Container: utils.String(d.Get("storage_container_name").(string)), - DateFormat: utils.String(d.Get("date_format").(string)), - PathPattern: utils.String(d.Get("path_pattern").(string)), - TimeFormat: utils.String(d.Get("time_format").(string)), + Container: pointer.To(d.Get("storage_container_name").(string)), + DateFormat: pointer.To(d.Get("date_format").(string)), + PathPattern: pointer.To(d.Get("path_pattern").(string)), + TimeFormat: pointer.To(d.Get("time_format").(string)), StorageAccounts: &[]inputs.StorageAccount{ { - AccountName: utils.String(d.Get("storage_account_name").(string)), + AccountName: pointer.To(d.Get("storage_account_name").(string)), AccountKey: normalizeAccountKey(d.Get("storage_account_key").(string)), }, }, - AuthenticationMode: utils.ToPtr(inputs.AuthenticationMode(d.Get("authentication_mode").(string))), + AuthenticationMode: pointer.To(inputs.AuthenticationMode(d.Get("authentication_mode").(string))), }, }, Serialization: serialization, @@ -190,21 +190,21 @@ func resourceStreamAnalyticsReferenceInputBlobUpdate(d *pluginsdk.ResourceData, // TODO d.HasChanges() props := inputs.Input{ - Name: utils.String(id.InputName), + Name: pointer.To(id.InputName), Properties: &inputs.ReferenceInputProperties{ Datasource: &inputs.BlobReferenceInputDataSource{ Properties: &inputs.BlobDataSourceProperties{ - Container: utils.String(d.Get("storage_container_name").(string)), - DateFormat: utils.String(d.Get("date_format").(string)), - PathPattern: utils.String(d.Get("path_pattern").(string)), - TimeFormat: utils.String(d.Get("time_format").(string)), + Container: pointer.To(d.Get("storage_container_name").(string)), + DateFormat: pointer.To(d.Get("date_format").(string)), + PathPattern: pointer.To(d.Get("path_pattern").(string)), + TimeFormat: pointer.To(d.Get("time_format").(string)), StorageAccounts: &[]inputs.StorageAccount{ { - AccountName: utils.String(d.Get("storage_account_name").(string)), + AccountName: pointer.To(d.Get("storage_account_name").(string)), AccountKey: normalizeAccountKey(d.Get("storage_account_key").(string)), }, }, - AuthenticationMode: utils.ToPtr(inputs.AuthenticationMode(d.Get("authentication_mode").(string))), + AuthenticationMode: pointer.To(inputs.AuthenticationMode(d.Get("authentication_mode").(string))), }, }, Serialization: serialization, @@ -327,7 +327,7 @@ func resourceStreamAnalyticsReferenceInputBlobDelete(d *pluginsdk.ResourceData, func normalizeAccountKey(accountKey string) *string { if accountKey != "" { - return utils.String(accountKey) + return pointer.To(accountKey) } return nil diff --git a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go index dc1e6679ab7f..b68c629f6516 100644 --- a/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go +++ 
b/internal/services/streamanalytics/stream_analytics_reference_input_mssql_resource.go @@ -8,6 +8,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" @@ -18,7 +19,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) func resourceStreamAnalyticsReferenceMsSql() *pluginsdk.Resource { @@ -154,31 +154,31 @@ func resourceStreamAnalyticsReferenceInputMsSqlCreateUpdate(d *pluginsdk.Resourc } properties := &inputs.AzureSqlReferenceInputDataSourceProperties{ - Server: utils.String(d.Get("server").(string)), - Database: utils.String(d.Get("database").(string)), - User: utils.String(d.Get("username").(string)), - Password: utils.String(d.Get("password").(string)), - RefreshType: utils.ToPtr(inputs.RefreshType(refreshType)), + Server: pointer.To(d.Get("server").(string)), + Database: pointer.To(d.Get("database").(string)), + User: pointer.To(d.Get("username").(string)), + Password: pointer.To(d.Get("password").(string)), + RefreshType: pointer.To(inputs.RefreshType(refreshType)), } if v, ok := d.GetOk("refresh_interval_duration"); ok { - properties.RefreshRate = utils.String(v.(string)) + properties.RefreshRate = pointer.To(v.(string)) } if v, ok := d.GetOk("full_snapshot_query"); ok { - properties.FullSnapshotQuery = utils.String(v.(string)) + properties.FullSnapshotQuery = pointer.To(v.(string)) } if v, ok := d.GetOk("delta_snapshot_query"); ok { - properties.DeltaSnapshotQuery = utils.String(v.(string)) + properties.DeltaSnapshotQuery = pointer.To(v.(string)) } if v, ok := d.GetOk("table"); ok { - properties.Table = utils.String(v.(string)) + properties.Table = pointer.To(v.(string)) } props := inputs.Input{ - Name: utils.String(id.InputName), + Name: pointer.To(id.InputName), Properties: &inputs.ReferenceInputProperties{ Datasource: &inputs.AzureSqlReferenceInputDataSource{ Properties: properties, diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go index 154dae1d78a9..5ab558f7aa48 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_resource.go @@ -8,6 +8,7 @@ import ( "log" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-helpers/resourcemanager/commonschema" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" @@ -17,7 +18,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" "github.com/hashicorp/terraform-provider-azurerm/internal/timeouts" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) func resourceStreamAnalyticsStreamInputEventHub() *pluginsdk.Resource { @@ -141,28 +141,28 @@ func resourceStreamAnalyticsStreamInputEventHubCreateUpdate(d *pluginsdk.Resourc } eventHubDataSourceProps := 
&inputs.EventHubStreamInputDataSourceProperties{ - EventHubName: utils.String(d.Get("eventhub_name").(string)), - ServiceBusNamespace: utils.String(d.Get("servicebus_namespace").(string)), - ConsumerGroupName: utils.String(d.Get("eventhub_consumer_group_name").(string)), - AuthenticationMode: utils.ToPtr(inputs.AuthenticationMode(d.Get("authentication_mode").(string))), + EventHubName: pointer.To(d.Get("eventhub_name").(string)), + ServiceBusNamespace: pointer.To(d.Get("servicebus_namespace").(string)), + ConsumerGroupName: pointer.To(d.Get("eventhub_consumer_group_name").(string)), + AuthenticationMode: pointer.To(inputs.AuthenticationMode(d.Get("authentication_mode").(string))), } if v, ok := d.GetOk("shared_access_policy_key"); ok { - eventHubDataSourceProps.SharedAccessPolicyKey = utils.String(v.(string)) + eventHubDataSourceProps.SharedAccessPolicyKey = pointer.To(v.(string)) } if v, ok := d.GetOk("shared_access_policy_name"); ok { - eventHubDataSourceProps.SharedAccessPolicyName = utils.String(v.(string)) + eventHubDataSourceProps.SharedAccessPolicyName = pointer.To(v.(string)) } props := inputs.Input{ - Name: utils.String(id.InputName), + Name: pointer.To(id.InputName), Properties: &inputs.StreamInputProperties{ Datasource: &inputs.EventHubStreamInputDataSource{ Properties: eventHubDataSourceProps, }, Serialization: serialization, - PartitionKey: utils.String(d.Get("partition_key").(string)), + PartitionKey: pointer.To(d.Get("partition_key").(string)), }, } diff --git a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go index 110d2139e37f..f5447fbf3a43 100644 --- a/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go +++ b/internal/services/streamanalytics/stream_analytics_stream_input_eventhub_v2_resource.go @@ -8,6 +8,7 @@ import ( "fmt" "time" + "github.com/hashicorp/go-azure-helpers/lang/pointer" "github.com/hashicorp/go-azure-helpers/lang/response" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/inputs" "github.com/hashicorp/go-azure-sdk/resource-manager/streamanalytics/2020-03-01/streamingjobs" @@ -16,7 +17,6 @@ import ( "github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/migration" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk" "github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation" - "github.com/hashicorp/terraform-provider-azurerm/utils" ) type StreamInputEventHubV2Resource struct{} @@ -153,18 +153,18 @@ func (r StreamInputEventHubV2Resource) Create() sdk.ResourceFunc { } props := &inputs.EventHubStreamInputDataSourceProperties{ - ServiceBusNamespace: utils.String(model.ServiceBusNamespace), - EventHubName: utils.String(model.EventHubName), - ConsumerGroupName: utils.String(model.EventHubConsumerGroupName), - AuthenticationMode: utils.ToPtr(inputs.AuthenticationMode(model.AuthenticationMode)), + ServiceBusNamespace: pointer.To(model.ServiceBusNamespace), + EventHubName: pointer.To(model.EventHubName), + ConsumerGroupName: pointer.To(model.EventHubConsumerGroupName), + AuthenticationMode: pointer.To(inputs.AuthenticationMode(model.AuthenticationMode)), } if v := model.SharedAccessPolicyKey; v != "" { - props.SharedAccessPolicyKey = utils.String(v) + props.SharedAccessPolicyKey = pointer.To(v) } if v := model.SharedAccessPolicyName; v != "" { - props.SharedAccessPolicyName = utils.String(v) + 
props.SharedAccessPolicyName = pointer.To(v) } serialization, err := expandStreamAnalyticsStreamInputSerializationTyped(model.Serialization) @@ -173,13 +173,13 @@ func (r StreamInputEventHubV2Resource) Create() sdk.ResourceFunc { } payload := inputs.Input{ - Name: utils.String(model.Name), + Name: pointer.To(model.Name), Properties: &inputs.StreamInputProperties{ Datasource: &inputs.EventHubV2StreamInputDataSource{ Properties: props, }, Serialization: serialization, - PartitionKey: utils.String(model.PartitionKey), + PartitionKey: pointer.To(model.PartitionKey), }, } @@ -214,10 +214,10 @@ func (r StreamInputEventHubV2Resource) Update() sdk.ResourceFunc { if d.HasChangesExcept("name", "stream_analytics_job_id") { props := &inputs.EventHubStreamInputDataSourceProperties{ - ServiceBusNamespace: utils.String(state.ServiceBusNamespace), - EventHubName: utils.String(state.EventHubName), - ConsumerGroupName: utils.String(state.EventHubConsumerGroupName), - AuthenticationMode: utils.ToPtr(inputs.AuthenticationMode(state.AuthenticationMode)), + ServiceBusNamespace: pointer.To(state.ServiceBusNamespace), + EventHubName: pointer.To(state.EventHubName), + ConsumerGroupName: pointer.To(state.EventHubConsumerGroupName), + AuthenticationMode: pointer.To(inputs.AuthenticationMode(state.AuthenticationMode)), } serialization, err := expandStreamAnalyticsStreamInputSerializationTyped(state.Serialization) @@ -226,13 +226,13 @@ func (r StreamInputEventHubV2Resource) Update() sdk.ResourceFunc { } payload := inputs.Input{ - Name: utils.String(state.Name), + Name: pointer.To(state.Name), Properties: &inputs.StreamInputProperties{ Datasource: &inputs.EventHubV2StreamInputDataSource{ Properties: props, }, Serialization: serialization, - PartitionKey: utils.String(state.PartitionKey), + PartitionKey: pointer.To(state.PartitionKey), }, } diff --git a/utils/pointer.go b/utils/pointer.go index 985c7f4ae9a4..7c4cc8fdf880 100644 --- a/utils/pointer.go +++ b/utils/pointer.go @@ -7,10 +7,6 @@ func Bool(input bool) *bool { return &input } -func Int(input int) *int { - return &input -} - func Int32(input int32) *int32 { return &input } @@ -27,10 +23,6 @@ func String(input string) *string { return &input } -func ToPtr[E any](e E) *E { - return &e -} - func StringSlice(input []string) *[]string { if input == nil { return nil
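Note on the pattern in this diff: the mechanical change throughout is swapping the provider-local pointer helpers (utils.ToPtr, utils.String, utils.Int, utils.Int32, utils.Bool, utils.Float) for the generic pointer.To from github.com/hashicorp/go-azure-helpers/lang/pointer, which is why utils.Int and the generic utils.ToPtr can be deleted from utils/pointer.go. The sketch below is a minimal, hypothetical illustration of the call-site pattern only: the exampleProps struct and its fields are invented for this note and do not appear in the provider, and it assumes pointer.To behaves like the removed generic ToPtr (take a value, return its address).

package main

import (
	"fmt"

	"github.com/hashicorp/go-azure-helpers/lang/pointer"
)

// exampleProps is a hypothetical SDK-style model: optional fields are pointers,
// mirroring the generated structs touched throughout this diff.
type exampleProps struct {
	Name     *string
	Capacity *int32
	Enabled  *bool
}

func main() {
	// Previously each field needed a type-specific helper (utils.String,
	// utils.Int32, utils.Bool) or the provider-local generic utils.ToPtr.
	// pointer.To is generic, so a single helper covers every field type.
	props := exampleProps{
		Name:     pointer.To("example"),
		Capacity: pointer.To(int32(3)),
		Enabled:  pointer.To(true),
	}

	fmt.Println(*props.Name, *props.Capacity, *props.Enabled)
}

At the call sites the effect is expected to be purely cosmetic: pointer.To(int32(3)) yields the same *int32 as the removed utils.Int32(int32(3)), so no behavioural change should result from this sweep.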