From 72be377034e5a86307b758f5c974ff8c80e5a7ab Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Mon, 19 Oct 2020 16:09:20 -0700
Subject: [PATCH 01/46] Initial checkin wait on sub list
---
.../services/loganalytics/client/client.go | 5 +
.../log_analytics_clusters_resource.go | 393 ++++++++++++++++++
.../parse/log_analytics_cluster.go | 31 ++
.../parse/log_analytics_cluster_test.go | 72 ++++
.../services/loganalytics/registration.go | 5 +-
.../validate/log_analytics_cluster.go | 27 ++
.../validate/log_analytics_cluster_test.go | 56 +++
.../r/log_analytics_cluster.html.markdown | 117 ++++++
8 files changed, 704 insertions(+), 2 deletions(-)
create mode 100644 azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
create mode 100644 azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go
create mode 100644 azurerm/internal/services/loganalytics/parse/log_analytics_cluster_test.go
create mode 100644 azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go
create mode 100644 azurerm/internal/services/loganalytics/validate/log_analytics_cluster_test.go
create mode 100644 website/docs/r/log_analytics_cluster.html.markdown
diff --git a/azurerm/internal/services/loganalytics/client/client.go b/azurerm/internal/services/loganalytics/client/client.go
index dabcc261098b..10760f4c932c 100644
--- a/azurerm/internal/services/loganalytics/client/client.go
+++ b/azurerm/internal/services/loganalytics/client/client.go
@@ -7,6 +7,7 @@ import (
)
type Client struct {
+ ClusterClient *operationalinsights.ClustersClient
DataSourcesClient *operationalinsights.DataSourcesClient
LinkedServicesClient *operationalinsights.LinkedServicesClient
SavedSearchesClient *operationalinsights.SavedSearchesClient
@@ -16,6 +17,9 @@ type Client struct {
}
func NewClient(o *common.ClientOptions) *Client {
+ ClusterClient := operationalinsights.NewClustersClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
+ o.ConfigureClient(&ClusterClient.Client, o.ResourceManagerAuthorizer)
+
DataSourcesClient := operationalinsights.NewDataSourcesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
o.ConfigureClient(&DataSourcesClient.Client, o.ResourceManagerAuthorizer)
@@ -35,6 +39,7 @@ func NewClient(o *common.ClientOptions) *Client {
o.ConfigureClient(&LinkedServicesClient.Client, o.ResourceManagerAuthorizer)
return &Client{
+ ClusterClient: &ClusterClient,
DataSourcesClient: &DataSourcesClient,
LinkedServicesClient: &LinkedServicesClient,
SavedSearchesClient: &SavedSearchesClient,
diff --git a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
new file mode 100644
index 000000000000..fead449c7289
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
@@ -0,0 +1,393 @@
+package loganalytics
+
+import (
+ "fmt"
+ "log"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/validation"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags"
+ azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+)
+
+func resourceArmLogAnalyticsCluster() *schema.Resource {
+ return &schema.Resource{
+ Create: resourceArmLogAnalyticsClusterCreate,
+ Read: resourceArmLogAnalyticsClusterRead,
+ Update: resourceArmLogAnalyticsClusterUpdate,
+ Delete: resourceArmLogAnalyticsClusterDelete,
+
+ Timeouts: &schema.ResourceTimeout{
+ Create: schema.DefaultTimeout(30 * time.Minute),
+ Read: schema.DefaultTimeout(5 * time.Minute),
+ Update: schema.DefaultTimeout(30 * time.Minute),
+ Delete: schema.DefaultTimeout(30 * time.Minute),
+ },
+
+ Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error {
+ _, err := parse.LogAnalyticsClusterID(id)
+ return err
+ }),
+
+ Schema: map[string]*schema.Schema{
+ "name": {
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ ValidateFunc: validate.LogAnalyticsClustersName,
+ },
+
+ "resource_group_name": azure.SchemaResourceGroupName(),
+
+ "location": azure.SchemaLocation(),
+
+ "identity": {
+ Type: schema.TypeList,
+ Optional: true,
+ ForceNew: true,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "type": {
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ ValidateFunc: validation.StringInSlice([]string{
+ string(operationalinsights.SystemAssigned),
+ string(operationalinsights.None),
+ }, false),
+ },
+
+ "principal_id": {
+ Type: schema.TypeString,
+ Computed: true,
+ },
+
+ "tenant_id": {
+ Type: schema.TypeString,
+ Computed: true,
+ },
+ },
+ },
+ },
+
+ "next_link": {
+ Type: schema.TypeString,
+ Optional: true,
+ ForceNew: true,
+ },
+
+ "key_vault_property": {
+ Type: schema.TypeList,
+ Optional: true,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "key_name": {
+ Type: schema.TypeString,
+ Optional: true,
+ },
+
+ "key_vault_uri": {
+ Type: schema.TypeString,
+ Optional: true,
+ },
+
+ "key_version": {
+ Type: schema.TypeString,
+ Optional: true,
+ },
+ },
+ },
+ },
+
+ "sku": {
+ Type: schema.TypeList,
+ Optional: true,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "name": {
+ Type: schema.TypeString,
+ Optional: true,
+ ValidateFunc: validation.StringInSlice([]string{
+ string(operationalinsights.CapacityReservation),
+ }, false),
+ },
+
+ "capacity": {
+ Type: schema.TypeInt,
+ Optional: true,
+ },
+ },
+ },
+ },
+
+ "cluster_id": {
+ Type: schema.TypeString,
+ Computed: true,
+ },
+
+ "type": {
+ Type: schema.TypeString,
+ Computed: true,
+ },
+
+ "tags": tags.Schema(),
+ },
+ }
+}
+func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.ClusterClient
+ ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ name := d.Get("name").(string)
+ resourceGroup := d.Get("resource_group_name").(string)
+
+ existing, err := client.Get(ctx, resourceGroup, name)
+ if err != nil {
+ if !utils.ResponseWasNotFound(existing.Response) {
+ return fmt.Errorf("checking for present of existing Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
+ }
+ }
+ if existing.ID != nil && *existing.ID != "" {
+ return tf.ImportAsExistsError("azurerm_operationalinsights_cluster", *existing.ID)
+ }
+
+ parameters := operationalinsights.Cluster{
+ Location: utils.String(location.Normalize(d.Get("location").(string))),
+ Identity: expandArmLogAnalyticsClusterIdentity(d.Get("identity").([]interface{})),
+ ClusterProperties: &operationalinsights.ClusterProperties{
+ NextLink: utils.String(d.Get("next_link").(string)),
+ KeyVaultProperties: expandArmLogAnalyticsClusterKeyVaultProperties(d.Get("key_vault_property").([]interface{})),
+ },
+ Sku: expandArmLogAnalyticsClusterClusterSku(d.Get("sku").([]interface{})),
+ Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
+ }
+ future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters)
+ if err != nil {
+ return fmt.Errorf("creating Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
+ }
+
+ if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
+ return fmt.Errorf("waiting on creating future for Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
+ }
+
+ resp, err := client.Get(ctx, resourceGroup, name)
+ if err != nil {
+ return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
+ }
+
+ if resp.ID == nil || *resp.ID == "" {
+ return fmt.Errorf("empty or nil ID returned for Log Analytics Cluster %q (Resource Group %q) ID", name, resourceGroup)
+ }
+
+ d.SetId(*resp.ID)
+ return resourceArmLogAnalyticsClusterRead(d, meta)
+}
+
+func resourceArmLogAnalyticsClusterRead(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.ClusterClient
+ ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ id, err := parse.LogAnalyticsClusterID(d.Id())
+ if err != nil {
+ return err
+ }
+
+ resp, err := client.Get(ctx, id.ResourceGroup, id.Name)
+ if err != nil {
+ if utils.ResponseWasNotFound(resp.Response) {
+ log.Printf("[INFO] Log Analytics %q does not exist - removing from state", d.Id())
+ d.SetId("")
+ return nil
+ }
+ return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
+ }
+ d.Set("name", id.Name)
+ d.Set("resource_group_name", id.ResourceGroup)
+ d.Set("location", location.NormalizeNilable(resp.Location))
+ if err := d.Set("identity", flattenArmLogAnalyticsIdentity(resp.Identity)); err != nil {
+ return fmt.Errorf("setting `identity`: %+v", err)
+ }
+ if props := resp.ClusterProperties; props != nil {
+ if err := d.Set("key_vault_property", flattenArmLogAnalyticsKeyVaultProperties(props.KeyVaultProperties)); err != nil {
+ return fmt.Errorf("setting `key_vault_property`: %+v", err)
+ }
+ d.Set("next_link", props.NextLink)
+ d.Set("cluster_id", props.ClusterID)
+ }
+ if err := d.Set("sku", flattenArmLogAnalyticsClusterSku(resp.Sku)); err != nil {
+ return fmt.Errorf("setting `sku`: %+v", err)
+ }
+ d.Set("type", resp.Type)
+ return tags.FlattenAndSet(d, resp.Tags)
+}
+
+func resourceArmLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.ClusterClient
+ ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ id, err := parse.LogAnalyticsClusterID(d.Id())
+ if err != nil {
+ return err
+ }
+
+ parameters := operationalinsights.ClusterPatch{
+ ClusterPatchProperties: &operationalinsights.ClusterPatchProperties{},
+ }
+ if d.HasChange("key_vault_property") {
+ parameters.ClusterPatchProperties.KeyVaultProperties = expandArmLogAnalyticsClusterKeyVaultProperties(d.Get("key_vault_property").([]interface{}))
+ }
+ if d.HasChange("sku") {
+ parameters.Sku = expandArmLogAnalyticsClusterClusterSku(d.Get("sku").([]interface{}))
+ }
+ if d.HasChange("tags") {
+ parameters.Tags = tags.Expand(d.Get("tags").(map[string]interface{}))
+ }
+
+ if _, err := client.Update(ctx, id.ResourceGroup, id.Name, parameters); err != nil {
+ return fmt.Errorf("updating Log Analytics Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
+ }
+ return resourceArmLogAnalyticsClusterRead(d, meta)
+}
+
+func resourceArmLogAnalyticsClusterDelete(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.ClusterClient
+ ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ id, err := parse.LogAnalyticsClusterID(d.Id())
+ if err != nil {
+ return err
+ }
+
+ future, err := client.Delete(ctx, id.ResourceGroup, id.Name)
+ if err != nil {
+ return fmt.Errorf("deleting Log Analytics Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
+ }
+
+ if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
+ return fmt.Errorf("waiting on deleting future for Log Analytics Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
+ }
+ return nil
+}
+
+func expandArmLogAnalyticsClusterIdentity(input []interface{}) *operationalinsights.Identity {
+ if len(input) == 0 {
+ return nil
+ }
+ v := input[0].(map[string]interface{})
+ return &operationalinsights.Identity{
+ Type: operationalinsights.IdentityType(v["type"].(string)),
+ }
+}
+
+func expandArmLogAnalyticsClusterKeyVaultProperties(input []interface{}) *operationalinsights.KeyVaultProperties {
+ if len(input) == 0 {
+ return nil
+ }
+ v := input[0].(map[string]interface{})
+ return &operationalinsights.KeyVaultProperties{
+ KeyVaultURI: utils.String(v["key_vault_uri"].(string)),
+ KeyName: utils.String(v["key_name"].(string)),
+ KeyVersion: utils.String(v["key_version"].(string)),
+ }
+}
+
+func expandArmLogAnalyticsClusterClusterSku(input []interface{}) *operationalinsights.ClusterSku {
+ if len(input) == 0 {
+ return nil
+ }
+ v := input[0].(map[string]interface{})
+ return &operationalinsights.ClusterSku{
+ Capacity: utils.Int64(int64(v["capacity"].(int))),
+ Name: operationalinsights.ClusterSkuNameEnum(v["name"].(string)),
+ }
+}
+
+func flattenArmLogAnalyticsIdentity(input *operationalinsights.Identity) []interface{} {
+ if input == nil {
+ return make([]interface{}, 0)
+ }
+
+ var t operationalinsights.IdentityType
+ if input.Type != "" {
+ t = input.Type
+ }
+ var principalId string
+ if input.PrincipalID != nil {
+ principalId = *input.PrincipalID
+ }
+ var tenantId string
+ if input.TenantID != nil {
+ tenantId = *input.TenantID
+ }
+ return []interface{}{
+ map[string]interface{}{
+ "type": t,
+ "principal_id": principalId,
+ "tenant_id": tenantId,
+ },
+ }
+}
+
+func flattenArmLogAnalyticsKeyVaultProperties(input *operationalinsights.KeyVaultProperties) []interface{} {
+ if input == nil {
+ return make([]interface{}, 0)
+ }
+
+ var keyName string
+ if input.KeyName != nil {
+ keyName = *input.KeyName
+ }
+ var keyVaultUri string
+ if input.KeyVaultURI != nil {
+ keyVaultUri = *input.KeyVaultURI
+ }
+ var keyVersion string
+ if input.KeyVersion != nil {
+ keyVersion = *input.KeyVersion
+ }
+ return []interface{}{
+ map[string]interface{}{
+ "key_name": keyName,
+ "key_vault_uri": keyVaultUri,
+ "key_version": keyVersion,
+ },
+ }
+}
+
+func flattenArmLogAnalyticsClusterSku(input *operationalinsights.ClusterSku) []interface{} {
+ if input == nil {
+ return make([]interface{}, 0)
+ }
+
+ var name operationalinsights.ClusterSkuNameEnum
+ if input.Name != "" {
+ name = input.Name
+ }
+ var capacity int64
+ if input.Capacity != nil {
+ capacity = *input.Capacity
+ }
+ return []interface{}{
+ map[string]interface{}{
+ "name": name,
+ "capacity": capacity,
+ },
+ }
+}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go
new file mode 100644
index 000000000000..b469ec0e5a61
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go
@@ -0,0 +1,31 @@
+package parse
+
+import (
+ "fmt"
+
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+)
+
+type LogAnalyticsClusterId struct {
+ ResourceGroup string
+ Name string
+}
+
+func LogAnalyticsClusterID(input string) (*LogAnalyticsClusterId, error) {
+ id, err := azure.ParseAzureResourceID(input)
+ if err != nil {
+ return nil, fmt.Errorf("parsing operationalinsightsCluster ID %q: %+v", input, err)
+ }
+
+ logAnalyticsCluster := LogAnalyticsClusterId{
+ ResourceGroup: id.ResourceGroup,
+ }
+ if logAnalyticsCluster.Name, err = id.PopSegment("clusters"); err != nil {
+ return nil, err
+ }
+ if err := id.ValidateNoEmptySegments(input); err != nil {
+ return nil, err
+ }
+
+ return &logAnalyticsCluster, nil
+}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_cluster_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster_test.go
new file mode 100644
index 000000000000..02eae7e4ad71
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster_test.go
@@ -0,0 +1,72 @@
+package parse
+
+import (
+ "testing"
+)
+
+func TestLogAnalyticsClusterID(t *testing.T) {
+ testData := []struct {
+ Name string
+ Input string
+ Expected *LogAnalyticsClusterId
+ }{
+ {
+ Name: "Empty",
+ Input: "",
+ Expected: nil,
+ },
+ {
+ Name: "No Resource Groups Segment",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000",
+ Expected: nil,
+ },
+ {
+ Name: "No Resource Groups Value",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/",
+ Expected: nil,
+ },
+ {
+ Name: "Resource Group ID",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/",
+ Expected: nil,
+ },
+ {
+ Name: "Missing Cluster Value",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/clusters",
+ Expected: nil,
+ },
+ {
+ Name: "Log Analytics Cluster ID",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/clusters/cluster1",
+ Expected: &LogAnalyticsClusterId{
+ ResourceGroup: "resourceGroup1",
+ Name: "cluster1",
+ },
+ },
+ {
+ Name: "Wrong Casing",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/Clusters/cluster1",
+ Expected: nil,
+ },
+ }
+
+ for _, v := range testData {
+ t.Logf("[DEBUG] Testing %q..", v.Name)
+
+ actual, err := LogAnalyticsClusterID(v.Input)
+ if err != nil {
+ if v.Expected == nil {
+ continue
+ }
+ t.Fatalf("Expected a value but got an error: %s", err)
+ }
+
+ if actual.ResourceGroup != v.Expected.ResourceGroup {
+ t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup)
+ }
+
+ if actual.Name != v.Expected.Name {
+ t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name)
+ }
+ }
+}
diff --git a/azurerm/internal/services/loganalytics/registration.go b/azurerm/internal/services/loganalytics/registration.go
index 5f3a80598a7a..a10f1db910f8 100644
--- a/azurerm/internal/services/loganalytics/registration.go
+++ b/azurerm/internal/services/loganalytics/registration.go
@@ -27,11 +27,12 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource {
// SupportedResources returns the supported Resources supported by this Service
func (r Registration) SupportedResources() map[string]*schema.Resource {
return map[string]*schema.Resource{
+ "azurerm_log_analytics_cluster": resourceArmLogAnalyticsCluster(),
+ "azurerm_log_analytics_datasource_windows_event": resourceArmLogAnalyticsDataSourceWindowsEvent(),
+ "azurerm_log_analytics_datasource_windows_performance_counter": resourceArmLogAnalyticsDataSourceWindowsPerformanceCounter(),
"azurerm_log_analytics_linked_service": resourceArmLogAnalyticsLinkedService(),
"azurerm_log_analytics_saved_search": resourceArmLogAnalyticsSavedSearch(),
"azurerm_log_analytics_solution": resourceArmLogAnalyticsSolution(),
"azurerm_log_analytics_workspace": resourceArmLogAnalyticsWorkspace(),
- "azurerm_log_analytics_datasource_windows_event": resourceArmLogAnalyticsDataSourceWindowsEvent(),
- "azurerm_log_analytics_datasource_windows_performance_counter": resourceArmLogAnalyticsDataSourceWindowsPerformanceCounter(),
}
}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go
new file mode 100644
index 000000000000..3e88c5236868
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go
@@ -0,0 +1,27 @@
+package validate
+
+import (
+ "fmt"
+ "regexp"
+)
+
+func LogAnalyticsClustersName(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return
+ }
+ if len(v) < 4 {
+ errors = append(errors, fmt.Errorf("length should be greater than %d", 4))
+ return
+ }
+ if len(v) > 63 {
+ errors = append(errors, fmt.Errorf("length should be less than %d", 63))
+ return
+ }
+ if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
+ errors = append(errors, fmt.Errorf("expected value of %s not match regular expression, got %v", k, v))
+ return
+ }
+ return
+}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_test.go
new file mode 100644
index 000000000000..4b01554f917e
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_test.go
@@ -0,0 +1,56 @@
+package validate
+
+import (
+ "testing"
+)
+
+func TestLogAnalyticsClustersName(t *testing.T) {
+ testCases := []struct {
+ Input string
+ Expected bool
+ }{
+ {
+ Input: "inv",
+ Expected: false,
+ },
+ {
+ Input: "invalid_Cluster_Name",
+ Expected: false,
+ },
+ {
+ Input: "invalid Cluster Name",
+ Expected: false,
+ },
+ {
+ Input: "-invalidClusterName",
+ Expected: false,
+ },
+ {
+ Input: "invalidClusterName-",
+ Expected: false,
+ },
+ {
+ Input: "validClusterName",
+ Expected: true,
+ },
+ {
+ Input: "validClusterName-2",
+ Expected: true,
+ },
+ {
+ Input: "thisIsTheLooooooooooooooooooooooooongestValidClusterNameThereIs",
+ Expected: true,
+ },
+ {
+ Input: "vali",
+ Expected: true,
+ },
+ }
+ for _, v := range testCases {
+ _, errors := LogAnalyticsClustersName(v.Input, "name")
+ result := len(errors) == 0
+ if result != v.Expected {
+ t.Fatalf("Expected the result to be %t but got %t (and %d errors)", v.Expected, result, len(errors))
+ }
+ }
+}
diff --git a/website/docs/r/log_analytics_cluster.html.markdown b/website/docs/r/log_analytics_cluster.html.markdown
new file mode 100644
index 000000000000..c6c93fcfe61e
--- /dev/null
+++ b/website/docs/r/log_analytics_cluster.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "Log Analytics"
+layout: "azurerm"
+page_title: "Azure Resource Manager: azurerm_log_analytics_cluster"
+description: |-
+ Manages a Log Analytics Cluster.
+---
+
+# azurerm_log_analytics_cluster
+
+Manages a Log Analytics Cluster.
+
+## Example Usage
+
+```hcl
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "example" {
+ name = "example-resources"
+ location = "West Europe"
+}
+
+resource "azurerm_log_analytics_cluster" "example" {
+ name = "example-cluster"
+ resource_group_name = azurerm_resource_group.example.name
+ location = azurerm_resource_group.example.location
+}
+```
+
+## Arguments Reference
+
+The following arguments are supported:
+
+* `name` - (Required) The name which should be used for this Log Analytics Cluster. Changing this forces a new Log Analytics Cluster to be created.
+
+* `resource_group_name` - (Required) The name of the Resource Group where the Log Analytics Cluster should exist. Changing this forces a new Log Analytics Cluster to be created.
+
+* `location` - (Required) The Azure Region where the Log Analytics Cluster should exist. Changing this forces a new Log Analytics Cluster to be created.
+
+---
+
+* `identity` - (Optional) An `identity` block as defined below. Changing this forces a new Log Analytics Cluster to be created.
+
+* `next_link` - (Optional) The link used to get the next page of recommendations. Changing this forces a new Log Analytics Cluster to be created.
+
+* `key_vault_property` - (Optional) A `key_vault_property` block as defined below.
+
+* `sku` - (Optional) A `sku` block as defined below.
+
+* `tags` - (Optional) A mapping of tags which should be assigned to the Log Analytics Cluster.
+
+---
+
+An `identity` block supports the following:
+
+* `type` - (Required) Specifies the identity type of the Log Analytics Cluster. At this time the only allowed value is `SystemAssigned`.
+
+~> **NOTE:** The assigned `principal_id` and `tenant_id` can be retrieved after the identity `type` has been set to `SystemAssigned` and the Log Analytics Cluster has been created. More details are available below.
+
+---
+
+A `key_vault_property` block supports the following:
+
+* `key_name` - (Optional) The name of the key associated with the Log Analytics cluster.
+
+* `key_vault_uri` - (Optional) The Key Vault URI which holds the key associated with the Log Analytics cluster.
+
+* `key_version` - (Optional) The version of the key associated with the Log Analytics cluster.
+
+---
+
+A `sku` block supports the following:
+
+* `name` - (Optional) The name which should be used for this SKU. The only possible value is `CapacityReservation`.
+
+* `capacity` - (Optional) The capacity value.
+
+## Attributes Reference
+
+In addition to the Arguments listed above - the following Attributes are exported:
+
+* `id` - The ID of the Log Analytics Cluster.
+
+* `identity` - An `identity` block as defined below.
+
+* `cluster_id` - The ID of the cluster.
+
+* `type` - The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
+
+---
+
+An `identity` block exports the following:
+
+* `principal_id` - The Principal ID for the Service Principal associated with the Identity of this Log Analytics Cluster.
+
+* `tenant_id` - The Tenant ID for the Service Principal associated with the Identity of this Log Analytics Cluster.
+
+-> You can access the Principal ID via `azurerm_log_analytics_cluster.example.identity.0.principal_id` and the Tenant ID via `azurerm_log_analytics_cluster.example.identity.0.tenant_id`
+
+## Timeouts
+
+The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
+
+* `create` - (Defaults to 30 minutes) Used when creating the Log Analytics Cluster.
+* `read` - (Defaults to 5 minutes) Used when retrieving the Log Analytics Cluster.
+* `update` - (Defaults to 30 minutes) Used when updating the Log Analytics Cluster.
+* `delete` - (Defaults to 30 minutes) Used when deleting the Log Analytics Cluster.
+
+## Import
+
+Log Analytics Clusters can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_log_analytics_cluster.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/clusters/cluster1
+```
\ No newline at end of file
From 9309d6617752802348ce3ddef9c41648d4586144 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Fri, 23 Oct 2020 22:32:30 -0700
Subject: [PATCH 02/46] Initial checkin of the resource
---
.../services/loganalytics/client/client.go | 33 ++-
...lytics_storage_insights_config_resource.go | 205 ++++++++++++++
.../log_analytics_storage_insight_config.go | 35 +++
...g_analytics_storage_insight_config_test.go | 77 ++++++
.../services/loganalytics/registration.go | 1 +
...cs_storage_insight_config_resource_test.go | 257 ++++++++++++++++++
.../validate/log_analytics_data_export.go | 43 +--
.../log_analytics_data_export_test.go | 8 +-
.../validate/log_analytics_name.go | 27 ++
.../log_analytics_storage_insight_config.go | 5 +
...g_analytics_storage_insight_config_test.go | 60 ++++
website/azurerm.erb | 4 +
.../r/log_analytics_data_export.html.markdown | 2 +-
...ytics_storage_insight_config.html.markdown | 74 +++++
14 files changed, 773 insertions(+), 58 deletions(-)
create mode 100644 azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
create mode 100644 azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go
create mode 100644 azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go
create mode 100644 azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
create mode 100644 azurerm/internal/services/loganalytics/validate/log_analytics_name.go
create mode 100644 azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go
create mode 100644 azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go
create mode 100644 website/docs/r/log_analytics_storage_insight_config.html.markdown
diff --git a/azurerm/internal/services/loganalytics/client/client.go b/azurerm/internal/services/loganalytics/client/client.go
index aab7672fdb0a..5ae7432a8d32 100644
--- a/azurerm/internal/services/loganalytics/client/client.go
+++ b/azurerm/internal/services/loganalytics/client/client.go
@@ -7,13 +7,14 @@ import (
)
type Client struct {
- DataExportClient *operationalinsights.DataExportsClient
- DataSourcesClient *operationalinsights.DataSourcesClient
- LinkedServicesClient *operationalinsights.LinkedServicesClient
- SavedSearchesClient *operationalinsights.SavedSearchesClient
- SharedKeysClient *operationalinsights.SharedKeysClient
- SolutionsClient *operationsmanagement.SolutionsClient
- WorkspacesClient *operationalinsights.WorkspacesClient
+ DataExportClient *operationalinsights.DataExportsClient
+ DataSourcesClient *operationalinsights.DataSourcesClient
+ LinkedServicesClient *operationalinsights.LinkedServicesClient
+ SavedSearchesClient *operationalinsights.SavedSearchesClient
+ SharedKeysClient *operationalinsights.SharedKeysClient
+ SolutionsClient *operationsmanagement.SolutionsClient
+ StorageInsightConfigClient *operationalinsights.StorageInsightConfigsClient
+ WorkspacesClient *operationalinsights.WorkspacesClient
}
func NewClient(o *common.ClientOptions) *Client {
@@ -35,16 +36,20 @@ func NewClient(o *common.ClientOptions) *Client {
SolutionsClient := operationsmanagement.NewSolutionsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId, "Microsoft.OperationsManagement", "solutions", "testing")
o.ConfigureClient(&SolutionsClient.Client, o.ResourceManagerAuthorizer)
+ StorageInsightConfigClient := operationalinsights.NewStorageInsightConfigsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
+ o.ConfigureClient(&StorageInsightConfigClient.Client, o.ResourceManagerAuthorizer)
+
LinkedServicesClient := operationalinsights.NewLinkedServicesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
o.ConfigureClient(&LinkedServicesClient.Client, o.ResourceManagerAuthorizer)
return &Client{
- DataExportClient: &DataExportClient,
- DataSourcesClient: &DataSourcesClient,
- LinkedServicesClient: &LinkedServicesClient,
- SavedSearchesClient: &SavedSearchesClient,
- SharedKeysClient: &SharedKeysClient,
- SolutionsClient: &SolutionsClient,
- WorkspacesClient: &WorkspacesClient,
+ DataExportClient: &DataExportClient,
+ DataSourcesClient: &DataSourcesClient,
+ LinkedServicesClient: &LinkedServicesClient,
+ SavedSearchesClient: &SavedSearchesClient,
+ SharedKeysClient: &SharedKeysClient,
+ SolutionsClient: &SolutionsClient,
+ StorageInsightConfigClient: &StorageInsightConfigClient,
+ WorkspacesClient: &WorkspacesClient,
}
}
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
new file mode 100644
index 000000000000..cad65b150195
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
@@ -0,0 +1,205 @@
+package loganalytics
+
+import (
+ "fmt"
+ "log"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/terraform-providers/terraform-provider-azuread/azuread/helpers/validate"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags"
+ azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+)
+
+func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
+ return &schema.Resource{
+ Create: resourceArmLogAnalyticsStorageInsightConfigCreateUpdate,
+ Read: resourceArmLogAnalyticsStorageInsightConfigRead,
+ Update: resourceArmLogAnalyticsStorageInsightConfigCreateUpdate,
+ Delete: resourceArmLogAnalyticsStorageInsightConfigDelete,
+
+ Timeouts: &schema.ResourceTimeout{
+ Create: schema.DefaultTimeout(30 * time.Minute),
+ Read: schema.DefaultTimeout(5 * time.Minute),
+ Update: schema.DefaultTimeout(30 * time.Minute),
+ Delete: schema.DefaultTimeout(30 * time.Minute),
+ },
+
+ Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error {
+ _, err := parse.LogAnalyticsStorageInsightConfigID(id)
+ return err
+ }),
+
+ Schema: map[string]*schema.Schema{
+ "name": {
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ },
+
+ "resource_group_name": azure.SchemaResourceGroupName(),
+
+ "workspace_resource_id": {
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ ValidateFunc: azure.ValidateResourceID,
+ },
+
+ "container_names": {
+ Type: schema.TypeSet,
+ Optional: true,
+ Elem: &schema.Schema{
+ Type: schema.TypeString,
+ },
+ },
+
+ "storage_account_resource_id": {
+ Type: schema.TypeString,
+ Required: true,
+ ValidateFunc: azure.ValidateResourceID,
+ },
+
+ "table_names": {
+ Type: schema.TypeSet,
+ Required: true,
+ MinItems: 1,
+ Elem: &schema.Schema{
+ Type: schema.TypeString,
+ ValidateFunc: validate.NoEmptyStrings,
+ },
+ },
+
+ "tags": tags.Schema(),
+ },
+ }
+}
+func resourceArmLogAnalyticsStorageInsightConfigCreateUpdate(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.StorageInsightConfigClient
+ ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ name := d.Get("name").(string)
+ resourceGroup := d.Get("resource_group_name").(string)
+ workspaceName := d.Get("workspace_name").(string)
+
+ if d.IsNewResource() {
+ existing, err := client.Get(ctx, resourceGroup, workspaceName, name)
+ if err != nil {
+ if !utils.ResponseWasNotFound(existing.Response) {
+ return fmt.Errorf("checking for presence of existing LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspaceName, err)
+ }
+ }
+ if existing.ID != nil && *existing.ID != "" {
+ return tf.ImportAsExistsError("azurerm_log_analytics_storage_insight_config", *existing.ID)
+ }
+ }
+
+ parameters := operationalinsights.StorageInsight{
+ ETag: utils.String(d.Get("e_tag").(string)),
+ StorageInsightProperties: &operationalinsights.StorageInsightProperties{
+ Containers: utils.ExpandStringSlice(d.Get("containers").(*schema.Set).List()),
+ StorageAccount: expandArmStorageInsightConfigStorageAccount(d.Get("storage_account").([]interface{})),
+ Tables: utils.ExpandStringSlice(d.Get("tables").(*schema.Set).List()),
+ },
+ Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
+ }
+ if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspaceName, name, parameters); err != nil {
+ return fmt.Errorf("creating/updating LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspaceName, err)
+ }
+
+ resp, err := client.Get(ctx, resourceGroup, workspaceName, name)
+ if err != nil {
+ return fmt.Errorf("retrieving LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspaceName, err)
+ }
+
+ if resp.ID == nil || *resp.ID == "" {
+ return fmt.Errorf("empty or nil ID returned for LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q) ID", name, resourceGroup, workspaceName)
+ }
+
+ d.SetId(*resp.ID)
+ return resourceArmLogAnalyticsStorageInsightConfigRead(d, meta)
+}
+
+func resourceArmLogAnalyticsStorageInsightConfigRead(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.StorageInsightConfigClient
+ ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ id, err := parse.LogAnalyticsStorageInsightConfigID(d.Id())
+ if err != nil {
+ return err
+ }
+
+ resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name)
+ if err != nil {
+ if utils.ResponseWasNotFound(resp.Response) {
+ log.Printf("[INFO] operationalinsights %q does not exist - removing from state", d.Id())
+ d.SetId("")
+ return nil
+ }
+ return fmt.Errorf("retrieving LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err)
+ }
+ d.Set("name", id.Name)
+ d.Set("resource_group_name", id.ResourceGroup)
+ d.Set("workspace_name", id.WorkspaceName)
+ d.Set("e_tag", resp.ETag)
+ if props := resp.StorageInsightProperties; props != nil {
+ d.Set("containers", utils.FlattenStringSlice(props.Containers))
+ if err := d.Set("storage_account", flattenArmStorageInsightConfigStorageAccount(props.StorageAccount)); err != nil {
+ return fmt.Errorf("setting `storage_account`: %+v", err)
+ }
+ d.Set("tables", utils.FlattenStringSlice(props.Tables))
+ }
+ d.Set("type", resp.Type)
+ return tags.FlattenAndSet(d, resp.Tags)
+}
+
+func resourceArmLogAnalyticsStorageInsightConfigDelete(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.StorageInsightConfigClient
+ ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ id, err := parse.LogAnalyticsStorageInsightConfigID(d.Id())
+ if err != nil {
+ return err
+ }
+
+ if _, err := client.Delete(ctx, id.ResourceGroup, id.WorkspaceName, id.Name); err != nil {
+ return fmt.Errorf("deleting LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err)
+ }
+ return nil
+}
+
+func expandArmStorageInsightConfigStorageAccount(input []interface{}) *operationalinsights.StorageAccount {
+ if len(input) == 0 {
+ return nil
+ }
+ v := input[0].(map[string]interface{})
+ return &operationalinsights.StorageAccount{
+ Key: utils.String(v["key"].(string)),
+ }
+}
+
+func flattenArmStorageInsightConfigStorageAccount(input *operationalinsights.StorageAccount) []interface{} {
+ if input == nil {
+ return make([]interface{}, 0)
+ }
+
+ var key string
+ if input.Key != nil {
+ key = *input.Key
+ }
+ return []interface{}{
+ map[string]interface{}{
+ "key": key,
+ },
+ }
+}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go
new file mode 100644
index 000000000000..c8be8d1ed985
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go
@@ -0,0 +1,35 @@
+package parse
+
+import (
+ "fmt"
+
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+)
+
+type LogAnalyticsStorageInsightConfigId struct {
+ ResourceGroup string
+ WorkspaceName string
+ Name string
+}
+
+func LogAnalyticsStorageInsightConfigID(input string) (*LogAnalyticsStorageInsightConfigId, error) {
+ id, err := azure.ParseAzureResourceID(input)
+ if err != nil {
+ return nil, fmt.Errorf("parsing LogAnalyticsStorageInsightConfig ID %q: %+v", input, err)
+ }
+
+ logAnalyticsStorageInsightConfig := LogAnalyticsStorageInsightConfigId{
+ ResourceGroup: id.ResourceGroup,
+ }
+ if logAnalyticsStorageInsightConfig.WorkspaceName, err = id.PopSegment("workspaces"); err != nil {
+ return nil, err
+ }
+ if logAnalyticsStorageInsightConfig.Name, err = id.PopSegment("storageInsightConfigs"); err != nil {
+ return nil, err
+ }
+ if err := id.ValidateNoEmptySegments(input); err != nil {
+ return nil, err
+ }
+
+ return &logAnalyticsStorageInsightConfig, nil
+}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go
new file mode 100644
index 000000000000..a739e07c02db
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go
@@ -0,0 +1,77 @@
+package parse
+
+import (
+ "testing"
+)
+
+func TestLogAnalyticsStorageInsightConfigID(t *testing.T) {
+ testData := []struct {
+ Name string
+ Input string
+ Expected *LogAnalyticsStorageInsightConfigId
+ }{
+ {
+ Name: "Empty",
+ Input: "",
+ Expected: nil,
+ },
+ {
+ Name: "No Resource Groups Segment",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000",
+ Expected: nil,
+ },
+ {
+ Name: "No Resource Groups Value",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/",
+ Expected: nil,
+ },
+ {
+ Name: "Resource Group ID",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/",
+ Expected: nil,
+ },
+ {
+ Name: "Missing StorageInsightConfig Value",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs",
+ Expected: nil,
+ },
+ {
+ Name: "operationalinsights StorageInsightConfig ID",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1",
+ Expected: &LogAnalyticsStorageInsightConfigId{
+ ResourceGroup: "resourceGroup1",
+ WorkspaceName: "workspace1",
+ Name: "storageInsight1",
+ },
+ },
+ {
+ Name: "Wrong Casing",
+ Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/StorageInsightConfigs/storageInsight1",
+ Expected: nil,
+ },
+ }
+
+ for _, v := range testData {
+ t.Logf("[DEBUG] Testing %q..", v.Name)
+
+ actual, err := LogAnalyticsStorageInsightConfigID(v.Input)
+ if err != nil {
+ if v.Expected == nil {
+ continue
+ }
+ t.Fatalf("Expected a value but got an error: %s", err)
+ }
+
+ if actual.ResourceGroup != v.Expected.ResourceGroup {
+ t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup)
+ }
+
+ if actual.WorkspaceName != v.Expected.WorkspaceName {
+ t.Fatalf("Expected %q but got %q for WorkspaceName", v.Expected.WorkspaceName, actual.WorkspaceName)
+ }
+
+ if actual.Name != v.Expected.Name {
+ t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name)
+ }
+ }
+}
diff --git a/azurerm/internal/services/loganalytics/registration.go b/azurerm/internal/services/loganalytics/registration.go
index b6cc2bac3f79..b22121bf8d6d 100644
--- a/azurerm/internal/services/loganalytics/registration.go
+++ b/azurerm/internal/services/loganalytics/registration.go
@@ -33,6 +33,7 @@ func (r Registration) SupportedResources() map[string]*schema.Resource {
"azurerm_log_analytics_solution": resourceArmLogAnalyticsSolution(),
"azurerm_log_analytics_workspace": resourceArmLogAnalyticsWorkspace(),
"azurerm_log_analytics_datasource_windows_event": resourceArmLogAnalyticsDataSourceWindowsEvent(),
+ "azurerm_log_analytics_storage_insight_config": resourceArmLogAnalyticsStorageInsightConfig(),
"azurerm_log_analytics_datasource_windows_performance_counter": resourceArmLogAnalyticsDataSourceWindowsPerformanceCounter(),
}
}
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
new file mode 100644
index 000000000000..999dfeef4790
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
@@ -0,0 +1,257 @@
+package tests
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+)
+
+func TestAccAzureRMLogAnalyticsStorageInsightConfig_basic(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func TestAccAzureRMLogAnalyticsStorageInsightConfig_requiresImport(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ ),
+ },
+ data.RequiresImportErrorStep(testAccAzureRMLogAnalyticsStorageInsightConfig_requiresImport),
+ },
+ })
+}
+
+func TestAccAzureRMLogAnalyticsStorageInsightConfig_complete(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func TestAccAzureRMLogAnalyticsStorageInsightConfig_update(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func TestAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func testCheckAzureRMLogAnalyticsStorageInsightConfigExists(resourceName string) resource.TestCheckFunc {
+ return func(s *terraform.State) error {
+ client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.StorageInsightConfigClient
+ ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+ rs, ok := s.RootModule().Resources[resourceName]
+ if !ok {
+ return fmt.Errorf("Log Analytics Storage Insight Config not found: %s", resourceName)
+ }
+ id, err := parse.LogAnalyticsStorageInsightConfigID(rs.Primary.ID)
+ if err != nil {
+ return err
+ }
+ if resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name); err != nil {
+ if utils.ResponseWasNotFound(resp.Response) {
+ return fmt.Errorf("bad: Log Analytics Storage Insight Config %q does not exist", id.Name)
+ }
+ return fmt.Errorf("bad: Get on LogAnalytics.StorageInsightConfigClient: %+v", err)
+ }
+ return nil
+ }
+}
+
+func testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy(s *terraform.State) error {
+ client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.StorageInsightConfigClient
+ ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+
+ for _, rs := range s.RootModule().Resources {
+ if rs.Type != "azurerm_log_analytics_storage_insight_config" {
+ continue
+ }
+ id, err := parse.LogAnalyticsStorageInsightConfigID(rs.Primary.ID)
+ if err != nil {
+ return err
+ }
+ if resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name); err != nil {
+ if !utils.ResponseWasNotFound(resp.Response) {
+ return fmt.Errorf("bad: Get on LogAnalytics.StorageInsightConfigClient: %+v", err)
+ }
+ }
+ return nil
+ }
+ return nil
+}
+
+func testAccAzureRMLogAnalyticsStorageInsightConfig_template(data acceptance.TestData) string {
+ return fmt.Sprintf(`
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "test" {
+ name = "acctest-la-%d"
+ location = "%s"
+}
+
+resource "azurerm_log_analytics_workspace" "test" {
+ name = "acctest-law-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomInteger)
+}
+
+func testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsStorageInsightConfig_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_storage_insight_config" "test" {
+ name = "acctest-lasic-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_name = azurerm_log_analytics_workspace.test.name
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMLogAnalyticsStorageInsightConfig_requiresImport(data acceptance.TestData) string {
+ config := testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_storage_insight_config" "import" {
+ name = azurerm_log_analytics_storage_insight_config.test.name
+ resource_group_name = azurerm_log_analytics_storage_insight_config.test.resource_group_name
+ workspace_name = azurerm_log_analytics_storage_insight_config.test.workspace_name
+}
+`, config)
+}
+
+func testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsStorageInsightConfig_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_storage_insight_config" "test" {
+ name = "acctest-lasic-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_name = azurerm_log_analytics_workspace.test.name
+ containers = ["wad-iis-logfiles"]
+ e_tag = ""
+ storage_account {
+ key = "1234"
+ }
+ tables = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
+ tags = {
+ ENV = "Test"
+ }
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsStorageInsightConfig_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_storage_insight_config" "test" {
+ name = "acctest-lasic-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_name = azurerm_log_analytics_workspace.test.name
+ containers = ["wad-iis-logfiles"]
+ e_tag = ""
+ storage_account {
+ key = "1234"
+ }
+ tables = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
+ tags = {
+ ENV = "Test"
+ }
+}
+`, template, data.RandomInteger)
+}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go
index 5335c90da758..80b4f6ca7051 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go
@@ -1,48 +1,9 @@
package validate
-import (
- "fmt"
- "regexp"
-)
-
func LogAnalyticsDataExportWorkspaceName(i interface{}, k string) (warnings []string, errors []error) {
- v, ok := i.(string)
- if !ok {
- errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
- return
- }
- if len(v) < 4 {
- errors = append(errors, fmt.Errorf("length should be greater than %d", 4))
- return
- }
- if len(v) > 63 {
- errors = append(errors, fmt.Errorf("length should be less than %d", 63))
- return
- }
- if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
- errors = append(errors, fmt.Errorf("expected value of %s not match regular expression, got %v", k, v))
- return
- }
- return
+ return LogAnalyticsGenericName(i, k)
}
func LogAnalyticsDataExportName(i interface{}, k string) (warnings []string, errors []error) {
- v, ok := i.(string)
- if !ok {
- errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
- return
- }
- if len(v) < 4 {
- errors = append(errors, fmt.Errorf("length should be greater than %d", 4))
- return
- }
- if len(v) > 63 {
- errors = append(errors, fmt.Errorf("length should be less than %d", 63))
- return
- }
- if !regexp.MustCompile(`^[A-Za-z][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
- errors = append(errors, fmt.Errorf("expected value of %s not match regular expression, got %v", k, v))
- return
- }
- return
+ return LogAnalyticsGenericName(i, k)
}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go
index 487a6e4672af..38b9b8b55502 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go
@@ -29,6 +29,10 @@ func TestLogAnalyticsDataExportName(t *testing.T) {
Input: "invalidExportsName-",
Expected: false,
},
+ {
+ Input: "thisIsToLooooooooooooooooooooooooooooooooooooooongForAExportName",
+ Expected: false,
+ },
{
Input: "validExportsName",
Expected: true,
@@ -38,7 +42,7 @@ func TestLogAnalyticsDataExportName(t *testing.T) {
Expected: true,
},
{
- Input: "thisIsTheLooooooooooooooooooooooooongestValidExportsNameThereIs",
+ Input: "thisIsTheLoooooooooooooooooooooooooongestValidExportNameThereIs",
Expected: true,
},
{
@@ -50,7 +54,7 @@ func TestLogAnalyticsDataExportName(t *testing.T) {
_, errors := LogAnalyticsDataExportName(v.Input, "name")
result := len(errors) == 0
if result != v.Expected {
- t.Fatalf("Expected the result to be %t but got %t (and %d errors)", v.Expected, result, len(errors))
+ t.Fatalf("Expected the result to be %t but got %t (and %d errors)", v.Expected, result, len(errors))
}
}
}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_name.go b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
new file mode 100644
index 000000000000..ad416ca621ee
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
@@ -0,0 +1,27 @@
+package validate
+
+import (
+ "fmt"
+ "regexp"
+)
+
+func LogAnalyticsGenericName(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return
+ }
+ if len(v) < 4 {
+ errors = append(errors, fmt.Errorf("%q length should be greater than %d", k, 4))
+ return
+ }
+ if len(v) > 63 {
+ errors = append(errors, fmt.Errorf("%q length should be less than %d", k, 63))
+ return
+ }
+ if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
+ errors = append(errors, fmt.Errorf("%q is invalid: must begin and end with an alphanumeric character and may only contain alphanumeric characters or hyphens, got %q", k, v))
+ return
+ }
+ return
+}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go
new file mode 100644
index 000000000000..6cc625e3ed8e
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go
@@ -0,0 +1,5 @@
+package validate
+
+func LogAnalyticsStorageInsightConfigWorkspaceName(i interface{}, k string) (warnings []string, errors []error) {
+ return LogAnalyticsGenericName(i, k)
+}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go
new file mode 100644
index 000000000000..eb1963bd8bc4
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go
@@ -0,0 +1,60 @@
+package validate
+
+import (
+ "testing"
+)
+
+func TestLogAnalyticsStorageInsightConfigWorkspaceName(t *testing.T) {
+ testCases := []struct {
+ Input string
+ Expected bool
+ }{
+ {
+ Input: "inv",
+ Expected: false,
+ },
+ {
+ Input: "invalid_Exports_Name",
+ Expected: false,
+ },
+ {
+ Input: "invalid Storage Insight Config Name Name",
+ Expected: false,
+ },
+ {
+ Input: "-invalidStorageInsightConfigName",
+ Expected: false,
+ },
+ {
+ Input: "invalidStorageInsightConfigName-",
+ Expected: false,
+ },
+ {
+ Input: "thisIsToLoooooooooooooooooooooongestForAStorageInsightConfigName",
+ Expected: false,
+ },
+ {
+ Input: "validStorageInsightConfigName",
+ Expected: true,
+ },
+ {
+ Input: "validStorageInsightConfigName-2",
+ Expected: true,
+ },
+ {
+ Input: "thisIsTheLoooooooooooongestValidStorageInsightConfigNameThereIs",
+ Expected: true,
+ },
+ {
+ Input: "vali",
+ Expected: true,
+ },
+ }
+ for _, v := range testCases {
+ _, errors := LogAnalyticsStorageInsightConfigWorkspaceName(v.Input, "workspace_name")
+ result := len(errors) == 0
+ if result != v.Expected {
+ t.Fatalf("Expected the result to be %t but got %t (and %d errors)", v.Expected, result, len(errors))
+ }
+ }
+}
diff --git a/website/azurerm.erb b/website/azurerm.erb
index 842664bff5ed..3dde6ac0e594 100644
--- a/website/azurerm.erb
+++ b/website/azurerm.erb
@@ -2023,6 +2023,10 @@
azurerm_log_analytics_solution
+
+ azurerm_log_analytics_storage_insight_config
+
+
azurerm_log_analytics_workspace
diff --git a/website/docs/r/log_analytics_data_export.html.markdown b/website/docs/r/log_analytics_data_export.html.markdown
index b96f4437e167..f6743d7ab6a0 100644
--- a/website/docs/r/log_analytics_data_export.html.markdown
+++ b/website/docs/r/log_analytics_data_export.html.markdown
@@ -72,8 +72,8 @@ In addition to the Arguments listed above - the following Attributes are exporte
The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
* `create` - (Defaults to 30 minutes) Used when creating the Log Analytics Data Export Rule.
-* `read` - (Defaults to 5 minutes) Used when retrieving the Log Analytics Data Export Rule.
* `update` - (Defaults to 30 minutes) Used when updating the Log Analytics Data Export Rule.
+* `read` - (Defaults to 5 minutes) Used when retrieving the Log Analytics Data Export Rule.
* `delete` - (Defaults to 30 minutes) Used when deleting the Log Analytics Data Export Rule.
## Import
diff --git a/website/docs/r/log_analytics_storage_insight_config.html.markdown b/website/docs/r/log_analytics_storage_insight_config.html.markdown
new file mode 100644
index 000000000000..0db1d1e43581
--- /dev/null
+++ b/website/docs/r/log_analytics_storage_insight_config.html.markdown
@@ -0,0 +1,74 @@
+---
+subcategory: "Log Analytics"
+layout: "azurerm"
+page_title: "Azure Resource Manager: azurerm_log_analytics_storage_insight_config"
+description: |-
+ Manages a Log Analytics Storage Insight Config.
+---
+
+# azurerm_log_analytics_storage_insight_config
+
+Manages a Log Analytics Storage Insight Config.
+
+## Example Usage
+
+```hcl
+resource "azurerm_resource_group" "example" {
+ name = "example-resources"
+ location = "West Europe"
+}
+
+resource "azurerm_log_analytics_workspace" "example" {
+ name = "example-workspace"
+ resource_group_name = azurerm_resource_group.example.name
+ location = azurerm_resource_group.example.location
+}
+
+resource "azurerm_log_analytics_storage_insight_config" "example" {
+ name = "example-storageinsightconfig"
+ resource_group_name = azurerm_resource_group.example.name
+ workspace_name = azurerm_log_analytics_workspace.example.name
+}
+```
+
+## Arguments Reference
+
+The following arguments are supported:
+
+* `name` - (Required) The name which should be used for this Log Analytics Storage Insight Config. Changing this forces a new Log Analytics Storage Insight Config to be created.
+
+* `resource_group_name` - (Required) The name of the Resource Group where the Log Analytics Storage Insight Config should exist. Changing this forces a new Log Analytics Storage Insight Config to be created.
+
+* `workspace_resource_id` - (Required) The resource ID of the workspace to create the Log Analytics Storage Insight Config within. Changing this forces a new Log Analytics Storage Insight Config to be created.
+
+* `container_names` - (Optional) The names of the blob containers that the workspace should read.
+
+* `storage_account_resource_id` - (Required) The storage account resource id.
+
+* `table_names` - (Required) The names of the Azure tables that the workspace should read.
+
+* `tags` - (Optional) A mapping of tags which should be assigned to the Log Analytics Storage Insight Config.
+
+
+## Attributes Reference
+
+In addition to the Arguments listed above - the following Attributes are exported:
+
+* `id` - The ID of the Log Analytics Storage Insight Config.
+
+## Timeouts
+
+The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
+
+* `create` - (Defaults to 30 minutes) Used when creating the Log Analytics Storage Insight Config.
+* `update` - (Defaults to 30 minutes) Used when updating the Log Analytics Storage Insight Config.
+* `read` - (Defaults to 5 minutes) Used when retrieving the Log Analytics Storage Insight Config.
+* `delete` - (Defaults to 30 minutes) Used when deleting the Log Analytics Storage Insight Config.
+
+## Import
+
+Log Analytics Storage Insight Configs can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_log_analytics_storage_insight_config.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1
+```
\ No newline at end of file
From 688b49c2f3ebb65fcb62c6bef67f2fcbd0b70604 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Sat, 24 Oct 2020 23:56:29 -0700
Subject: [PATCH 03/46] Correcting schema updates
---
...lytics_storage_insights_config_resource.go | 46 ++++++++++---------
.../validate/log_analytics_name.go | 6 +--
.../log_analytics_storage_insight_config.go | 4 ++
3 files changed, 32 insertions(+), 24 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
index cad65b150195..b5904cebb80e 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
@@ -7,11 +7,12 @@ import (
"github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
- "github.com/terraform-providers/terraform-provider-azuread/azuread/helpers/validate"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/validation"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags"
azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
@@ -39,9 +40,10 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
Schema: map[string]*schema.Schema{
"name": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ ValidateFunc: validate.LogAnalyticsStorageInsightConfigName,
},
"resource_group_name": azure.SchemaResourceGroupName(),
@@ -53,7 +55,7 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
ValidateFunc: azure.ValidateResourceID,
},
- "container_names": {
+ "blob_container_names": {
Type: schema.TypeSet,
Optional: true,
Elem: &schema.Schema{
@@ -69,11 +71,11 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
"table_names": {
Type: schema.TypeSet,
- Required: true,
+ Optional: true,
MinItems: 1,
Elem: &schema.Schema{
Type: schema.TypeString,
- ValidateFunc: validate.NoEmptyStrings,
+ ValidateFunc: validation.NoZeroValues,
},
},
@@ -88,13 +90,16 @@ func resourceArmLogAnalyticsStorageInsightConfigCreateUpdate(d *schema.ResourceD
name := d.Get("name").(string)
resourceGroup := d.Get("resource_group_name").(string)
- workspaceName := d.Get("workspace_name").(string)
+ workspace, err := parse.LogAnalyticsWorkspaceID(d.Get("workspace_resource_id").(string))
+ if err != nil {
+ return err
+ }
if d.IsNewResource() {
- existing, err := client.Get(ctx, resourceGroup, workspaceName, name)
+ existing, err := client.Get(ctx, resourceGroup, workspace.Name, name)
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
- return fmt.Errorf("checking for present of existing LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspaceName, err)
+ return fmt.Errorf("checking for present of existing Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
}
}
if existing.ID != nil && *existing.ID != "" {
@@ -103,25 +108,24 @@ func resourceArmLogAnalyticsStorageInsightConfigCreateUpdate(d *schema.ResourceD
}
parameters := operationalinsights.StorageInsight{
- ETag: utils.String(d.Get("e_tag").(string)),
StorageInsightProperties: &operationalinsights.StorageInsightProperties{
- Containers: utils.ExpandStringSlice(d.Get("containers").(*schema.Set).List()),
- StorageAccount: expandArmStorageInsightConfigStorageAccount(d.Get("storage_account").([]interface{})),
- Tables: utils.ExpandStringSlice(d.Get("tables").(*schema.Set).List()),
+ Containers: utils.ExpandStringSlice(d.Get("blob_container_names").(*schema.Set).List()),
+ StorageAccount: expandArmStorageInsightConfigStorageAccount(d.Get("storage_account_resource_id").([]interface{})),
+ Tables: utils.ExpandStringSlice(d.Get("table_names").(*schema.Set).List()),
},
Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
}
- if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspaceName, name, parameters); err != nil {
- return fmt.Errorf("creating/updating LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspaceName, err)
+ if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.Name, name, parameters); err != nil {
+ return fmt.Errorf("creating/updating Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
}
- resp, err := client.Get(ctx, resourceGroup, workspaceName, name)
+ resp, err := client.Get(ctx, resourceGroup, workspace.Name, name)
if err != nil {
- return fmt.Errorf("retrieving LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspaceName, err)
+ return fmt.Errorf("retrieving Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
}
if resp.ID == nil || *resp.ID == "" {
- return fmt.Errorf("empty or nil ID returned for LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q) ID", name, resourceGroup, workspaceName)
+ return fmt.Errorf("empty or nil ID returned for Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q) ID", name, resourceGroup, workspace.Name)
}
d.SetId(*resp.ID)
@@ -141,11 +145,11 @@ func resourceArmLogAnalyticsStorageInsightConfigRead(d *schema.ResourceData, met
resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name)
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
- log.Printf("[INFO] operationalinsights %q does not exist - removing from state", d.Id())
+ log.Printf("[INFO] Log Analytics Storage Insight Config %q does not exist - removing from state", d.Id())
d.SetId("")
return nil
}
- return fmt.Errorf("retrieving LogAnalytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err)
+ return fmt.Errorf("retrieving Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err)
}
d.Set("name", id.Name)
d.Set("resource_group_name", id.ResourceGroup)
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_name.go b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
index ad416ca621ee..0e97f0a70551 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
@@ -8,15 +8,15 @@ import (
func LogAnalyticsGenericName(i interface{}, k string) (warnings []string, errors []error) {
v, ok := i.(string)
if !ok {
- errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ errors = append(errors, fmt.Errorf("expected %q to be a string", k))
return
}
if len(v) < 4 {
- errors = append(errors, fmt.Errorf("%q length should be greater than %d", k, 4))
+ errors = append(errors, fmt.Errorf("%q length should be greater than or equal to %d characters in length", k, 4))
return
}
if len(v) > 63 {
- errors = append(errors, fmt.Errorf("%q length should be less than %d", k, 63))
+ errors = append(errors, fmt.Errorf("%q length should be less than or equal %d characters in length", k, 63))
return
}
if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go
index 6cc625e3ed8e..4971bf785534 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go
@@ -1,5 +1,9 @@
package validate
+func LogAnalyticsStorageInsightConfigName(i interface{}, k string) (warnings []string, errors []error) {
+ return LogAnalyticsGenericName(i, k)
+}
+
func LogAnalyticsStorageInsightConfigWorkspaceName(i interface{}, k string) (warnings []string, errors []error) {
return LogAnalyticsGenericName(i, k)
}
From 34b72287e766b7e8d30e880a8d9581362d464967 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Sun, 25 Oct 2020 20:35:52 -0700
Subject: [PATCH 04/46] Working resource
---
...lytics_storage_insights_config_resource.go | 96 +++++++++++++------
.../log_analytics_storage_insight_config.go | 10 +-
.../validate/log_analytics_name.go | 26 +++++
...ytics_storage_insight_config.html.markdown | 42 ++++++--
4 files changed, 133 insertions(+), 41 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
index b5904cebb80e..b2fb7fd4c452 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
@@ -9,6 +9,7 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
@@ -46,13 +47,36 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
ValidateFunc: validate.LogAnalyticsStorageInsightConfigName,
},
- "resource_group_name": azure.SchemaResourceGroupName(),
+ // must ignore case since API lowercases all returned data
+ "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(),
"workspace_resource_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- ValidateFunc: azure.ValidateResourceID,
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ DiffSuppressFunc: suppress.CaseDifference,
+ ValidateFunc: azure.ValidateResourceID,
+ },
+
+ "storage_account": {
+ Type: schema.TypeList,
+ Required: true,
+ MaxItems: 1,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "id": {
+ Type: schema.TypeString,
+ Required: true,
+ ValidateFunc: azure.ValidateResourceID,
+ },
+ "key": {
+ Type: schema.TypeString,
+ Required: true,
+ Sensitive: true,
+ ValidateFunc: validate.IsBase64Encoded,
+ },
+ },
+ },
},
"blob_container_names": {
@@ -63,12 +87,6 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
},
},
- "storage_account_resource_id": {
- Type: schema.TypeString,
- Required: true,
- ValidateFunc: azure.ValidateResourceID,
- },
-
"table_names": {
Type: schema.TypeSet,
Optional: true,
@@ -109,12 +127,19 @@ func resourceArmLogAnalyticsStorageInsightConfigCreateUpdate(d *schema.ResourceD
parameters := operationalinsights.StorageInsight{
StorageInsightProperties: &operationalinsights.StorageInsightProperties{
- Containers: utils.ExpandStringSlice(d.Get("blob_container_names").(*schema.Set).List()),
- StorageAccount: expandArmStorageInsightConfigStorageAccount(d.Get("storage_account_resource_id").([]interface{})),
- Tables: utils.ExpandStringSlice(d.Get("table_names").(*schema.Set).List()),
+ StorageAccount: expandArmStorageInsightConfigStorageAccount(d.Get("storage_account").([]interface{})),
},
Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
}
+
+ if _, ok := d.GetOk("table_names"); ok {
+ parameters.StorageInsightProperties.Tables = utils.ExpandStringSlice(d.Get("table_names").(*schema.Set).List())
+ }
+
+ if _, ok := d.GetOk("blob_container_names"); ok {
+ parameters.StorageInsightProperties.Containers = utils.ExpandStringSlice(d.Get("blob_container_names").(*schema.Set).List())
+ }
+
if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.Name, name, parameters); err != nil {
return fmt.Errorf("creating/updating Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
}
@@ -151,18 +176,23 @@ func resourceArmLogAnalyticsStorageInsightConfigRead(d *schema.ResourceData, met
}
return fmt.Errorf("retrieving Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err)
}
+
d.Set("name", id.Name)
d.Set("resource_group_name", id.ResourceGroup)
- d.Set("workspace_name", id.WorkspaceName)
- d.Set("e_tag", resp.ETag)
+ d.Set("workspace_resource_id", id.WorkspaceID)
+
+ // the API does not return the key so we need to pull it from the config
+ sa := d.Get("storage_account").([]interface{})
+ v := sa[0].(map[string]interface{})
+
if props := resp.StorageInsightProperties; props != nil {
- d.Set("containers", utils.FlattenStringSlice(props.Containers))
- if err := d.Set("storage_account", flattenArmStorageInsightConfigStorageAccount(props.StorageAccount)); err != nil {
+ d.Set("blob_container_names", utils.FlattenStringSlice(props.Containers))
+ if err := d.Set("storage_account", flattenArmStorageInsightConfigStorageAccount(props.StorageAccount, v["key"].(string))); err != nil {
return fmt.Errorf("setting `storage_account`: %+v", err)
}
- d.Set("tables", utils.FlattenStringSlice(props.Tables))
+ d.Set("table_names", utils.FlattenStringSlice(props.Tables))
}
- d.Set("type", resp.Type)
+
return tags.FlattenAndSet(d, resp.Tags)
}
@@ -186,24 +216,30 @@ func expandArmStorageInsightConfigStorageAccount(input []interface{}) *operation
if len(input) == 0 {
return nil
}
+
v := input[0].(map[string]interface{})
return &operationalinsights.StorageAccount{
+ ID: utils.String(v["id"].(string)),
Key: utils.String(v["key"].(string)),
}
}
-func flattenArmStorageInsightConfigStorageAccount(input *operationalinsights.StorageAccount) []interface{} {
+// you must pass the storage account key to the flatten since the API only returns the id of the storage account
+func flattenArmStorageInsightConfigStorageAccount(input *operationalinsights.StorageAccount, key string) *[]interface{} {
+ output := make([]interface{}, 0)
if input == nil {
- return make([]interface{}, 0)
+ return &output
}
- var key string
- if input.Key != nil {
- key = *input.Key
- }
- return []interface{}{
- map[string]interface{}{
- "key": key,
- },
+ var id string
+ if input.ID != nil {
+ id = *input.ID
}
+
+ output = append(output, map[string]interface{}{
+ "id": id,
+ "key": key,
+ })
+
+ return &output
}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go
index c8be8d1ed985..566ba96842ef 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go
@@ -9,13 +9,14 @@ import (
type LogAnalyticsStorageInsightConfigId struct {
ResourceGroup string
WorkspaceName string
+ WorkspaceID string
Name string
}
func LogAnalyticsStorageInsightConfigID(input string) (*LogAnalyticsStorageInsightConfigId, error) {
id, err := azure.ParseAzureResourceID(input)
if err != nil {
- return nil, fmt.Errorf("parsing LogAnalyticsStorageInsightConfig ID %q: %+v", input, err)
+ return nil, fmt.Errorf("parsing Log Analytics Storage Insight Config ID %q: %+v", input, err)
}
logAnalyticsStorageInsightConfig := LogAnalyticsStorageInsightConfigId{
@@ -24,8 +25,13 @@ func LogAnalyticsStorageInsightConfigID(input string) (*LogAnalyticsStorageInsig
if logAnalyticsStorageInsightConfig.WorkspaceName, err = id.PopSegment("workspaces"); err != nil {
return nil, err
}
+ if logAnalyticsStorageInsightConfig.WorkspaceID = fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s", id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsStorageInsightConfig.WorkspaceName); err != nil {
+ return nil, fmt.Errorf("formatting Log Analytics Storage Insight Config workspace ID %q", input)
+ }
if logAnalyticsStorageInsightConfig.Name, err = id.PopSegment("storageInsightConfigs"); err != nil {
- return nil, err
+ if logAnalyticsStorageInsightConfig.Name, err = id.PopSegment("storageinsightconfigs"); err != nil {
+ return nil, err
+ }
}
if err := id.ValidateNoEmptySegments(input); err != nil {
return nil, err
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_name.go b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
index 0e97f0a70551..cf5937cd43d0 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
@@ -1,8 +1,10 @@
package validate
import (
+ "encoding/base64"
"fmt"
"regexp"
+ "strings"
)
func LogAnalyticsGenericName(i interface{}, k string) (warnings []string, errors []error) {
@@ -11,17 +13,41 @@ func LogAnalyticsGenericName(i interface{}, k string) (warnings []string, errors
errors = append(errors, fmt.Errorf("expected %q to be a string", k))
return
}
+
if len(v) < 4 {
errors = append(errors, fmt.Errorf("%q length should be greater than or equal to %d characters in length", k, 4))
return
}
+
if len(v) > 63 {
errors = append(errors, fmt.Errorf("%q length should be less than or equal %d characters in length", k, 63))
return
}
+
if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
errors = append(errors, fmt.Errorf("the %q is invalid, the %q must begin with an alphanumeric character, end with an alphanumeric character and may only contain alphanumeric characters or hyphens, got %q", k, k, v))
return
}
return
}
+
+func IsBase64Encoded(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected %q to be a string", k))
+ return
+ }
+
+ if len(strings.TrimSpace(v)) < 1 {
+ errors = append(errors, fmt.Errorf("%q must not be an empty string", k))
+ return
+ }
+
+ _, err := base64.StdEncoding.DecodeString(v)
+ if err != nil {
+ errors = append(errors, fmt.Errorf("expected %q to be a base64 encoded string", k))
+ return
+ }
+
+ return
+}
diff --git a/website/docs/r/log_analytics_storage_insight_config.html.markdown b/website/docs/r/log_analytics_storage_insight_config.html.markdown
index 0db1d1e43581..c54e6e775f9c 100644
--- a/website/docs/r/log_analytics_storage_insight_config.html.markdown
+++ b/website/docs/r/log_analytics_storage_insight_config.html.markdown
@@ -19,15 +19,30 @@ resource "azurerm_resource_group" "example" {
}
resource "azurerm_log_analytics_workspace" "example" {
- name = "example-workspace"
+ name = "exampleworkspace"
+ location = azurerm_resource_group.example.location
resource_group_name = azurerm_resource_group.example.name
- location = azurerm_resource_group.example.location
+ sku = "PerGB2018"
+ retention_in_days = 30
+}
+
+resource "azurerm_storage_account" "example" {
+ name = "examplestoracc"
+ resource_group_name = azurerm_resource_group.example.name
+ location = azurerm_resource_group.example.location
+ account_tier = "Standard"
+ account_replication_type = "LRS"
}
resource "azurerm_log_analytics_storage_insight_config" "example" {
- name = "example-storageinsightconfig"
- resource_group_name = azurerm_resource_group.example.name
- workspace_name = azurerm_log_analytics_workspace.example.name
+ name = "example-storageinsightconfig"
+ resource_group_name = azurerm_resource_group.example.name
+ workspace_resource_id = azurerm_log_analytics_workspace.example.id
+
+ storage_account {
+ id = azurerm_storage_account.example.id
+ key = azurerm_storage_account.example.primary_access_key
+ }
}
```
@@ -41,14 +56,23 @@ The following arguments are supported:
* `workspace_resource_id` - (Required) The resource ID of the workspace to create the Log Analytics Storage Insight Config within. Changing this forces a new Log Analytics Storage Insight Config to be created.
-* `containers` - (Optional) The names of the blob containers that the workspace should read.
+* `storage_account` - (Required) A `storage_account` block as defined below.
-* `storage_account_resource_id` - (Required) The storage account resource id.
+* `blob_container_names` - (Optional) The names of the blob containers that the workspace should read.
-* `table_names` - (Required) The names of the Azure tables that the workspace should read.
+* `table_names` - (Optional) The names of the Azure tables that the workspace should read.
* `tags` - (Optional) A mapping of tags which should be assigned to the Log Analytics Storage Insight Config.
+---
+
+The `storage_account` block supports:
+
+* `id` - (Required) The resource ID of the storage account to be used by this Log Analytics Storage Insight Config.
+
+* `key` - (Required) The storage access key to be used to connect to the storage account.
+
+---
## Attributes Reference
@@ -61,8 +85,8 @@ In addition to the Arguments listed above - the following Attributes are exporte
The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
* `create` - (Defaults to 30 minutes) Used when creating the Log Analytics Storage Insight Config.
-* `update` - (Defaults to 30 minutes) Used when updating the Log Analytics Storage Insight Config.
* `read` - (Defaults to 5 minutes) Used when retrieving the Log Analytics Storage Insight Config.
+* `update` - (Defaults to 30 minutes) Used when updating the Log Analytics Storage Insight Config.
* `delete` - (Defaults to 30 minutes) Used when deleting the Log Analytics Storage Insight Config.
## Import
From 5da7caa3f3c73b1bd71d2f717bdc84bc98a38942 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Mon, 26 Oct 2020 17:57:50 -0700
Subject: [PATCH 05/46] Updates and tests
---
...lytics_storage_insights_config_resource.go | 82 +++++---------
.../parse/log_analytics_data_export_test.go | 1 +
...g_analytics_storage_insight_config_test.go | 1 +
...cs_storage_insight_config_resource_test.go | 103 +++++++++++-------
.../log_analytics_data_export_test.go | 15 ++-
...g_analytics_storage_insight_config_test.go | 17 ++-
...ytics_storage_insight_config.html.markdown | 20 +---
7 files changed, 125 insertions(+), 114 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
index b2fb7fd4c452..123240b846bb 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
@@ -3,6 +3,7 @@ package loganalytics
import (
"fmt"
"log"
+ "strings"
"time"
"github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
@@ -58,25 +59,18 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
ValidateFunc: azure.ValidateResourceID,
},
- "storage_account": {
- Type: schema.TypeList,
- Required: true,
- MaxItems: 1,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "id": {
- Type: schema.TypeString,
- Required: true,
- ValidateFunc: azure.ValidateResourceID,
- },
- "key": {
- Type: schema.TypeString,
- Required: true,
- Sensitive: true,
- ValidateFunc: validate.IsBase64Encoded,
- },
- },
- },
+ "storage_account_resource_id": {
+ Type: schema.TypeString,
+ Required: true,
+ ValidateFunc: azure.ValidateResourceID,
+ },
+
+ "storage_account_key": {
+ Type: schema.TypeString,
+ Optional: true,
+ Computed: true,
+ Sensitive: true,
+ ValidateFunc: validate.IsBase64Encoded,
},
"blob_container_names": {
@@ -108,6 +102,12 @@ func resourceArmLogAnalyticsStorageInsightConfigCreateUpdate(d *schema.ResourceD
name := d.Get("name").(string)
resourceGroup := d.Get("resource_group_name").(string)
+ storageAccountId := d.Get("storage_account_resource_id").(string)
+ storageAccountKey := d.Get("storage_account_key").(string)
+ if len(strings.TrimSpace(storageAccountKey)) < 1 {
+ return fmt.Errorf("The argument 'storage_account_key' is required, but no definition was found.")
+ }
+
workspace, err := parse.LogAnalyticsWorkspaceID(d.Get("workspace_resource_id").(string))
if err != nil {
return err
@@ -127,7 +127,7 @@ func resourceArmLogAnalyticsStorageInsightConfigCreateUpdate(d *schema.ResourceD
parameters := operationalinsights.StorageInsight{
StorageInsightProperties: &operationalinsights.StorageInsightProperties{
- StorageAccount: expandArmStorageInsightConfigStorageAccount(d.Get("storage_account").([]interface{})),
+ StorageAccount: expandArmStorageInsightConfigStorageAccount(storageAccountId, storageAccountKey),
},
Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
}
@@ -167,6 +167,9 @@ func resourceArmLogAnalyticsStorageInsightConfigRead(d *schema.ResourceData, met
return err
}
+ // Need to pull this from the config since the API does not return this value
+ storageAccountKey := d.Get("storage_account_key").(string)
+
resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name)
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
@@ -181,15 +184,10 @@ func resourceArmLogAnalyticsStorageInsightConfigRead(d *schema.ResourceData, met
d.Set("resource_group_name", id.ResourceGroup)
d.Set("workspace_resource_id", id.WorkspaceID)
- // the API does not return the key so we need to pull it from the config
- sa := d.Get("storage_account").([]interface{})
- v := sa[0].(map[string]interface{})
-
if props := resp.StorageInsightProperties; props != nil {
d.Set("blob_container_names", utils.FlattenStringSlice(props.Containers))
- if err := d.Set("storage_account", flattenArmStorageInsightConfigStorageAccount(props.StorageAccount, v["key"].(string))); err != nil {
- return fmt.Errorf("setting `storage_account`: %+v", err)
- }
+ d.Set("storage_account_resource_id", props.StorageAccount.ID)
+ d.Set("storage_account_key", storageAccountKey)
d.Set("table_names", utils.FlattenStringSlice(props.Tables))
}
@@ -212,34 +210,10 @@ func resourceArmLogAnalyticsStorageInsightConfigDelete(d *schema.ResourceData, m
return nil
}
-func expandArmStorageInsightConfigStorageAccount(input []interface{}) *operationalinsights.StorageAccount {
- if len(input) == 0 {
- return nil
- }
+func expandArmStorageInsightConfigStorageAccount(id string, key string) *operationalinsights.StorageAccount {
- v := input[0].(map[string]interface{})
return &operationalinsights.StorageAccount{
- ID: utils.String(v["id"].(string)),
- Key: utils.String(v["key"].(string)),
- }
-}
-
-// you must pass the storage account key to the the flatten since the API only returns the id of the storage account
-func flattenArmStorageInsightConfigStorageAccount(input *operationalinsights.StorageAccount, key string) *[]interface{} {
- output := make([]interface{}, 0)
- if input == nil {
- return &output
+ ID: utils.String(id),
+ Key: utils.String(key),
}
-
- var id string
- if input.ID != nil {
- id = *input.ID
- }
-
- output = append(output, map[string]interface{}{
- "id": id,
- "key": key,
- })
-
- return &output
}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_data_export_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_data_export_test.go
index cc53682a7f5e..9582c5e997f5 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_data_export_test.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_data_export_test.go
@@ -41,6 +41,7 @@ func TestLogAnalyticsDataExportID(t *testing.T) {
Expected: &LogAnalyticsDataExportId{
ResourceGroup: "resourceGroup1",
WorkspaceName: "workspace1",
+ WorkspaceID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1",
Name: "dataExport1",
},
},
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go
index a739e07c02db..5d5f821ba194 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go
@@ -41,6 +41,7 @@ func TestLogAnalyticsStorageInsightConfigID(t *testing.T) {
Expected: &LogAnalyticsStorageInsightConfigId{
ResourceGroup: "resourceGroup1",
WorkspaceName: "workspace1",
+ WorkspaceID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1",
Name: "storageInsight1",
},
},
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
index 999dfeef4790..e55e78088d2f 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
@@ -25,7 +25,7 @@ func TestAccAzureRMLogAnalyticsStorageInsightConfig_basic(t *testing.T) {
testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("storage_account_key"), // key is not returned by the API
},
})
}
@@ -61,7 +61,7 @@ func TestAccAzureRMLogAnalyticsStorageInsightConfig_complete(t *testing.T) {
testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("storage_account_key"), // key is not returned by the API
},
})
}
@@ -79,21 +79,21 @@ func TestAccAzureRMLogAnalyticsStorageInsightConfig_update(t *testing.T) {
testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("storage_account_key"),
{
Config: testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("storage_account_key"),
{
Config: testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("storage_account_key"), // key is not returned by the API
},
})
}
@@ -111,14 +111,14 @@ func TestAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(t *test
testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("storage_account_key"),
{
Config: testAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("storage_account_key"), // key is not returned by the API
},
})
}
@@ -174,16 +174,26 @@ provider "azurerm" {
}
resource "azurerm_resource_group" "test" {
- name = "acctest-la-%d"
+ name = "acctestRG-la-%d"
location = "%s"
}
resource "azurerm_log_analytics_workspace" "test" {
- name = "acctest-law-%d"
+ name = "acctestLAW-%d"
+ location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
- location = azurerm_resource_group.test.location
+ sku = "PerGB2018"
}
-`, data.RandomInteger, data.Locations.Primary, data.RandomInteger)
+
+resource "azurerm_storage_account" "test" {
+ name = "acctestsads%s"
+ resource_group_name = azurerm_resource_group.test.name
+
+ location = azurerm_resource_group.test.location
+ account_tier = "Standard"
+ account_replication_type = "LRS"
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString)
}
func testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data acceptance.TestData) string {
@@ -192,9 +202,12 @@ func testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data acceptance.TestDa
%s
resource "azurerm_log_analytics_storage_insight_config" "test" {
- name = "acctest-lasic-%d"
- resource_group_name = azurerm_resource_group.test.name
- workspace_name = azurerm_log_analytics_workspace.test.name
+ name = "acctest-la-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_resource_id = azurerm_log_analytics_workspace.test.id
+
+ storage_account_resource_id = azurerm_storage_account.test.id
+ storage_account_key = azurerm_storage_account.test.primary_access_key
}
`, template, data.RandomInteger)
}
@@ -205,9 +218,12 @@ func testAccAzureRMLogAnalyticsStorageInsightConfig_requiresImport(data acceptan
%s
resource "azurerm_log_analytics_storage_insight_config" "import" {
- name = azurerm_log_analytics_storage_insight_config.test.name
- resource_group_name = azurerm_log_analytics_storage_insight_config.test.resource_group_name
- workspace_name = azurerm_log_analytics_storage_insight_config.test.workspace_name
+ name = azurerm_log_analytics_storage_insight_config.test.name
+ resource_group_name = azurerm_log_analytics_storage_insight_config.test.resource_group_name
+ workspace_resource_id = azurerm_log_analytics_storage_insight_config.test.workspace_resource_id
+
+ storage_account_resource_id = azurerm_storage_account.test.id
+ storage_account_key = azurerm_storage_account.test.primary_access_key
}
`, config)
}
@@ -218,18 +234,15 @@ func testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data acceptance.Tes
%s
resource "azurerm_log_analytics_storage_insight_config" "test" {
- name = "acctest-lasic-%d"
- resource_group_name = azurerm_resource_group.test.name
- workspace_name = azurerm_log_analytics_workspace.test.name
- containers = ["wad-iis-logfiles"]
- e_tag = ""
- storage_account {
- key = "1234"
- }
- tables = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
- tags = {
- ENV = "Test"
- }
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_resource_id = azurerm_log_analytics_workspace.test.id
+
+ blob_container_names = ["wad-iis-logfiles"]
+ table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
+
+ storage_account_resource_id = azurerm_storage_account.test.id
+ storage_account_key = azurerm_storage_account.test.primary_access_key
}
`, template, data.RandomInteger)
}
@@ -239,19 +252,25 @@ func testAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(data ac
return fmt.Sprintf(`
%s
-resource "azurerm_log_analytics_storage_insight_config" "test" {
- name = "acctest-lasic-%d"
+resource "azurerm_storage_account" "test2" {
+ name = "acctestsads%s"
resource_group_name = azurerm_resource_group.test.name
- workspace_name = azurerm_log_analytics_workspace.test.name
- containers = ["wad-iis-logfiles"]
- e_tag = ""
- storage_account {
- key = "1234"
- }
- tables = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
- tags = {
- ENV = "Test"
- }
+
+ location = azurerm_resource_group.test.location
+ account_tier = "Standard"
+ account_replication_type = "LRS"
}
-`, template, data.RandomInteger)
+
+resource "azurerm_log_analytics_storage_insight_config" "test" {
+ name = "acctest-la-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_resource_id = azurerm_log_analytics_workspace.test.id
+
+ blob_container_names = ["wad-iis-logfiles"]
+ table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
+
+ storage_account_resource_id = azurerm_storage_account.test2.id
+ storage_account_key = azurerm_storage_account.test2.primary_access_key
+}
+`, template, data.RandomStringOfLength(6), data.RandomInteger)
}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go
index 38b9b8b55502..a91d9f35c83a 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export_test.go
@@ -6,55 +6,68 @@ import (
func TestLogAnalyticsDataExportName(t *testing.T) {
testCases := []struct {
+ Name string
Input string
Expected bool
}{
{
+ Name: "Too short",
Input: "inv",
Expected: false,
},
{
+ Name: "Invalid characters underscores",
Input: "invalid_Exports_Name",
Expected: false,
},
{
+ Name: "Invalid characters space",
Input: "invalid Exports Name",
Expected: false,
},
{
+ Name: "Invalid name starts with hyphen",
Input: "-invalidExportsName",
Expected: false,
},
{
+ Name: "Invalid name ends with hyphen",
Input: "invalidExportsName-",
Expected: false,
},
{
+ Name: "Invalid name too long",
Input: "thisIsToLooooooooooooooooooooooooooooooooooooooongForAExportName",
Expected: false,
},
{
+ Name: "Valid name",
Input: "validExportsName",
Expected: true,
},
{
+ Name: "Valid name with hyphen",
Input: "validExportsName-2",
Expected: true,
},
{
+ Name: "Valid name max length",
Input: "thisIsTheLoooooooooooooooooooooooooongestValidExportNameThereIs",
Expected: true,
},
{
+ Name: "Valid name min length",
Input: "vali",
Expected: true,
},
}
for _, v := range testCases {
+ t.Logf("[DEBUG] Testing %q..", v.Name)
+
_, errors := LogAnalyticsDataExportName(v.Input, "name")
result := len(errors) == 0
if result != v.Expected {
- t.Fatalf("Expected the result to be %q but got %q (and %d errors)", v.Expected, result, len(errors))
+ t.Fatalf("Expected the result to be %v but got %v (and %d errors)", v.Expected, result, len(errors))
}
}
}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go
index eb1963bd8bc4..5e0029d9652b 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go
@@ -6,55 +6,68 @@ import (
func TestLogAnalyticsStorageInsightConfigWorkspaceName(t *testing.T) {
testCases := []struct {
+ Name string
Input string
Expected bool
}{
{
+ Name: "Too short",
Input: "inv",
Expected: false,
},
{
+ Name: "Invalid characters underscores",
Input: "invalid_Exports_Name",
Expected: false,
},
{
+ Name: "Invalid characters space",
Input: "invalid Storage Insight Config Name Name",
Expected: false,
},
{
+ Name: "Invalid name starts with hyphen",
Input: "-invalidStorageInsightConfigName",
Expected: false,
},
{
+ Name: "Invalid name ends with hyphen",
Input: "invalidStorageInsightConfigName-",
Expected: false,
},
{
+ Name: "Invalid name too long",
Input: "thisIsToLoooooooooooooooooooooongestForAStorageInsightConfigName",
- Expected: true,
+ Expected: false,
},
{
+ Name: "Valid name",
Input: "validStorageInsightConfigName",
Expected: true,
},
{
+ Name: "Valid name with hyphen",
Input: "validStorageInsightConfigName-2",
Expected: true,
},
{
+ Name: "Valid name max length",
Input: "thisIsTheLoooooooooooongestValidStorageInsightConfigNameThereIs",
Expected: true,
},
{
+ Name: "Valid name min length",
Input: "vali",
Expected: true,
},
}
for _, v := range testCases {
+ t.Logf("[DEBUG] Testing %q..", v.Name)
+
_, errors := LogAnalyticsStorageInsightConfigWorkspaceName(v.Input, "workspace_name")
result := len(errors) == 0
if result != v.Expected {
- t.Fatalf("Expected the result to be %q but got %q (and %d errors)", v.Expected, result, len(errors))
+ t.Fatalf("Expected the result to be %v but got %v (and %d errors)", v.Expected, result, len(errors))
}
}
}
diff --git a/website/docs/r/log_analytics_storage_insight_config.html.markdown b/website/docs/r/log_analytics_storage_insight_config.html.markdown
index c54e6e775f9c..66d9b744be39 100644
--- a/website/docs/r/log_analytics_storage_insight_config.html.markdown
+++ b/website/docs/r/log_analytics_storage_insight_config.html.markdown
@@ -39,10 +39,8 @@ resource "azurerm_log_analytics_storage_insight_config" "example" {
resource_group_name = azurerm_resource_group.example.name
workspace_resource_id = azurerm_log_analytics_workspace.example.id
- storage_account {
- id = azurerm_storage_account.example.id
- key = azurerm_storage_account.example.primary_access_key
- }
+ storage_account_resource_id = azurerm_storage_account.example.id
+ storage_account_key = azurerm_storage_account.example.primary_access_key
}
```
@@ -56,7 +54,9 @@ The following arguments are supported:
* `workspace_resource_id` - (Required) The resource ID of the workspace to create the Log Analytics Storage Insight Config within. Changing this forces a new Log Analytics Storage Insight Config to be created.
-* `storage_account` - (Required) A `storage_account` block as defined below.
+* `storage_account_resource_id` - (Required) The resource ID of the storage account to be used by this Log Analytics Storage Insight Config.
+
+* `storage_account_key` - (Required) The storage access key to be used to connect to the storage account.
* `blob_container_names` - (Optional) The names of the blob containers that the workspace should read.
@@ -64,16 +64,6 @@ The following arguments are supported:
* `tags` - (Optional) A mapping of tags which should be assigned to the Log Analytics Storage Insight Config.
----
-
-The `storage_account` block supports:
-
-* `id` - (Required) The resource ID of the storage account to be used by this Log Analytics Storage Insight Config.
-
-* `key` - (Required) The storage access key to be used to connect to the storage account.
-
----
-
## Attributes Reference
In addition to the Arguments listed above - the following Attributes are exported:
From 3b4d10ec6dcb21d28b88bd342480cd0624a267b3 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Mon, 26 Oct 2020 21:11:46 -0700
Subject: [PATCH 06/46] Terrafmt
---
.../log_analytics_storage_insights_config_resource.go | 1 -
.../log_analytics_storage_insight_config_resource_test.go | 4 ++--
.../docs/r/log_analytics_storage_insight_config.html.markdown | 4 ++--
3 files changed, 4 insertions(+), 5 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
index 123240b846bb..8d54b9323783 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
@@ -211,7 +211,6 @@ func resourceArmLogAnalyticsStorageInsightConfigDelete(d *schema.ResourceData, m
}
func expandArmStorageInsightConfigStorageAccount(id string, key string) *operationalinsights.StorageAccount {
-
return &operationalinsights.StorageAccount{
ID: utils.String(id),
Key: utils.String(key),
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
index e55e78088d2f..d353a562f5d3 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
@@ -239,7 +239,7 @@ resource "azurerm_log_analytics_storage_insight_config" "test" {
workspace_resource_id = azurerm_log_analytics_workspace.test.id
blob_container_names = ["wad-iis-logfiles"]
- table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
+ table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
storage_account_resource_id = azurerm_storage_account.test.id
storage_account_key = azurerm_storage_account.test.primary_access_key
@@ -267,7 +267,7 @@ resource "azurerm_log_analytics_storage_insight_config" "test" {
workspace_resource_id = azurerm_log_analytics_workspace.test.id
blob_container_names = ["wad-iis-logfiles"]
- table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
+ table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
storage_account_resource_id = azurerm_storage_account.test2.id
storage_account_key = azurerm_storage_account.test2.primary_access_key
diff --git a/website/docs/r/log_analytics_storage_insight_config.html.markdown b/website/docs/r/log_analytics_storage_insight_config.html.markdown
index 66d9b744be39..8e86b241d23d 100644
--- a/website/docs/r/log_analytics_storage_insight_config.html.markdown
+++ b/website/docs/r/log_analytics_storage_insight_config.html.markdown
@@ -39,8 +39,8 @@ resource "azurerm_log_analytics_storage_insight_config" "example" {
resource_group_name = azurerm_resource_group.example.name
workspace_resource_id = azurerm_log_analytics_workspace.example.id
- storage_account_resource_id = azurerm_storage_account.example.id
- storage_account_key = azurerm_storage_account.example.primary_access_key
+ storage_account_resource_id = azurerm_storage_account.example.id
+ storage_account_key = azurerm_storage_account.example.primary_access_key
}
```
From 85787651d56eb381e6cef25a22734fb0e92d8ab9 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Mon, 26 Oct 2020 21:16:50 -0700
Subject: [PATCH 07/46] Terrafmt
---
website/docs/r/log_analytics_cluster.html.markdown | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/website/docs/r/log_analytics_cluster.html.markdown b/website/docs/r/log_analytics_cluster.html.markdown
index c6c93fcfe61e..911e03d0c1ba 100644
--- a/website/docs/r/log_analytics_cluster.html.markdown
+++ b/website/docs/r/log_analytics_cluster.html.markdown
@@ -23,9 +23,9 @@ resource "azurerm_resource_group" "example" {
}
resource "azurerm_log_analytics_cluster" "example" {
- name = "example-cluster"
+ name = "example-cluster"
resource_group_name = azurerm_resource_group.example.name
- location = azurerm_resource_group.example.location
+ location = azurerm_resource_group.example.location
}
```
From 5ca15d40cfd86521edbda287a32873dc330a32c9 Mon Sep 17 00:00:00 2001
From: WS <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 4 Nov 2020 18:34:36 -0800
Subject: [PATCH 08/46] Update
azurerm/internal/services/loganalytics/client/client.go
Co-authored-by: kt
---
azurerm/internal/services/loganalytics/client/client.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/azurerm/internal/services/loganalytics/client/client.go b/azurerm/internal/services/loganalytics/client/client.go
index 5ae7432a8d32..bdfb164ee270 100644
--- a/azurerm/internal/services/loganalytics/client/client.go
+++ b/azurerm/internal/services/loganalytics/client/client.go
@@ -13,7 +13,7 @@ type Client struct {
SavedSearchesClient *operationalinsights.SavedSearchesClient
SharedKeysClient *operationalinsights.SharedKeysClient
SolutionsClient *operationsmanagement.SolutionsClient
- StorageInsightConfigClient *operationalinsights.StorageInsightConfigsClient
+ StorageInsightsClient *operationalinsights.StorageInsightConfigsClient
WorkspacesClient *operationalinsights.WorkspacesClient
}
From e821b399bffd198524be5089709f5eaadb16afa0 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 4 Nov 2020 21:25:12 -0800
Subject: [PATCH 09/46] Updates per PR review...
---
.../services/loganalytics/client/client.go | 36 ++---
...og_analytics_storage_insights_resource.go} | 59 ++++----
...g.go => log_analytics_storage_insights.go} | 10 +-
...=> log_analytics_storage_insights_test.go} | 8 +-
.../services/loganalytics/registration.go | 2 +-
...alytics_storage_insights_resource_test.go} | 136 +++++++++---------
.../log_analytics_storage_insight_config.go | 9 --
.../log_analytics_storage_insights.go | 9 ++
...=> log_analytics_storage_insights_test.go} | 4 +-
website/azurerm.erb | 2 +-
..._analytics_storage_insights.html.markdown} | 42 +++---
11 files changed, 159 insertions(+), 158 deletions(-)
rename azurerm/internal/services/loganalytics/{log_analytics_storage_insights_config_resource.go => log_analytics_storage_insights_resource.go} (72%)
rename azurerm/internal/services/loganalytics/parse/{log_analytics_storage_insight_config.go => log_analytics_storage_insights.go} (75%)
rename azurerm/internal/services/loganalytics/parse/{log_analytics_storage_insight_config_test.go => log_analytics_storage_insights_test.go} (91%)
rename azurerm/internal/services/loganalytics/tests/{log_analytics_storage_insight_config_resource_test.go => log_analytics_storage_insights_resource_test.go} (53%)
delete mode 100644 azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go
create mode 100644 azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights.go
rename azurerm/internal/services/loganalytics/validate/{log_analytics_storage_insight_config_test.go => log_analytics_storage_insights_test.go} (90%)
rename website/docs/r/{log_analytics_storage_insight_config.html.markdown => log_analytics_storage_insights.html.markdown} (55%)
diff --git a/azurerm/internal/services/loganalytics/client/client.go b/azurerm/internal/services/loganalytics/client/client.go
index 5ae7432a8d32..860c0a067f90 100644
--- a/azurerm/internal/services/loganalytics/client/client.go
+++ b/azurerm/internal/services/loganalytics/client/client.go
@@ -7,14 +7,14 @@ import (
)
type Client struct {
- DataExportClient *operationalinsights.DataExportsClient
- DataSourcesClient *operationalinsights.DataSourcesClient
- LinkedServicesClient *operationalinsights.LinkedServicesClient
- SavedSearchesClient *operationalinsights.SavedSearchesClient
- SharedKeysClient *operationalinsights.SharedKeysClient
- SolutionsClient *operationsmanagement.SolutionsClient
- StorageInsightConfigClient *operationalinsights.StorageInsightConfigsClient
- WorkspacesClient *operationalinsights.WorkspacesClient
+ DataExportClient *operationalinsights.DataExportsClient
+ DataSourcesClient *operationalinsights.DataSourcesClient
+ LinkedServicesClient *operationalinsights.LinkedServicesClient
+ SavedSearchesClient *operationalinsights.SavedSearchesClient
+ SharedKeysClient *operationalinsights.SharedKeysClient
+ SolutionsClient *operationsmanagement.SolutionsClient
+ StorageInsightsClient *operationalinsights.StorageInsightConfigsClient
+ WorkspacesClient *operationalinsights.WorkspacesClient
}
func NewClient(o *common.ClientOptions) *Client {
@@ -36,20 +36,20 @@ func NewClient(o *common.ClientOptions) *Client {
SolutionsClient := operationsmanagement.NewSolutionsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId, "Microsoft.OperationsManagement", "solutions", "testing")
o.ConfigureClient(&SolutionsClient.Client, o.ResourceManagerAuthorizer)
- StorageInsightConfigClient := operationalinsights.NewStorageInsightConfigsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
- o.ConfigureClient(&StorageInsightConfigClient.Client, o.ResourceManagerAuthorizer)
+ StorageInsightsClient := operationalinsights.NewStorageInsightConfigsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
+ o.ConfigureClient(&StorageInsightsClient.Client, o.ResourceManagerAuthorizer)
LinkedServicesClient := operationalinsights.NewLinkedServicesClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
o.ConfigureClient(&LinkedServicesClient.Client, o.ResourceManagerAuthorizer)
return &Client{
- DataExportClient: &DataExportClient,
- DataSourcesClient: &DataSourcesClient,
- LinkedServicesClient: &LinkedServicesClient,
- SavedSearchesClient: &SavedSearchesClient,
- SharedKeysClient: &SharedKeysClient,
- SolutionsClient: &SolutionsClient,
- StorageInsightConfigClient: &StorageInsightConfigClient,
- WorkspacesClient: &WorkspacesClient,
+ DataExportClient: &DataExportClient,
+ DataSourcesClient: &DataSourcesClient,
+ LinkedServicesClient: &LinkedServicesClient,
+ SavedSearchesClient: &SavedSearchesClient,
+ SharedKeysClient: &SharedKeysClient,
+ SolutionsClient: &SolutionsClient,
+ StorageInsightsClient: &StorageInsightsClient,
+ WorkspacesClient: &WorkspacesClient,
}
}
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
similarity index 72%
rename from azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
rename to azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
index 8d54b9323783..3d1c560d6a5d 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_config_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
@@ -21,12 +21,12 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)
-func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
+func resourceArmLogAnalyticsStorageInsights() *schema.Resource {
return &schema.Resource{
- Create: resourceArmLogAnalyticsStorageInsightConfigCreateUpdate,
- Read: resourceArmLogAnalyticsStorageInsightConfigRead,
- Update: resourceArmLogAnalyticsStorageInsightConfigCreateUpdate,
- Delete: resourceArmLogAnalyticsStorageInsightConfigDelete,
+ Create: resourceArmLogAnalyticsStorageInsightsCreateUpdate,
+ Read: resourceArmLogAnalyticsStorageInsightsRead,
+ Update: resourceArmLogAnalyticsStorageInsightsCreateUpdate,
+ Delete: resourceArmLogAnalyticsStorageInsightsDelete,
Timeouts: &schema.ResourceTimeout{
Create: schema.DefaultTimeout(30 * time.Minute),
@@ -36,7 +36,7 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
},
Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error {
- _, err := parse.LogAnalyticsStorageInsightConfigID(id)
+ _, err := parse.LogAnalyticsStorageInsightsID(id)
return err
}),
@@ -45,13 +45,14 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
Type: schema.TypeString,
Required: true,
ForceNew: true,
- ValidateFunc: validate.LogAnalyticsStorageInsightConfigName,
+ ValidateFunc: validate.LogAnalyticsStorageInsightsName,
},
// must ignore case since API lowercases all returned data
+ // Issue: https://github.com/Azure/azure-sdk-for-go/issues/13268
"resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(),
- "workspace_resource_id": {
+ "workspace_id": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
@@ -59,7 +60,7 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
ValidateFunc: azure.ValidateResourceID,
},
- "storage_account_resource_id": {
+ "storage_account_id": {
Type: schema.TypeString,
Required: true,
ValidateFunc: azure.ValidateResourceID,
@@ -95,20 +96,20 @@ func resourceArmLogAnalyticsStorageInsightConfig() *schema.Resource {
},
}
}
-func resourceArmLogAnalyticsStorageInsightConfigCreateUpdate(d *schema.ResourceData, meta interface{}) error {
- client := meta.(*clients.Client).LogAnalytics.StorageInsightConfigClient
+func resourceArmLogAnalyticsStorageInsightsCreateUpdate(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.StorageInsightsClient
ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
defer cancel()
name := d.Get("name").(string)
resourceGroup := d.Get("resource_group_name").(string)
- storageAccountId := d.Get("storage_account_resource_id").(string)
+ storageAccountId := d.Get("storage_account_id").(string)
storageAccountKey := d.Get("storage_account_key").(string)
if len(strings.TrimSpace(storageAccountKey)) < 1 {
return fmt.Errorf("The argument 'storage_account_key' is required, but no definition was found.")
}
- workspace, err := parse.LogAnalyticsWorkspaceID(d.Get("workspace_resource_id").(string))
+ workspace, err := parse.LogAnalyticsWorkspaceID(d.Get("workspace_id").(string))
if err != nil {
return err
}
@@ -117,11 +118,11 @@ func resourceArmLogAnalyticsStorageInsightConfigCreateUpdate(d *schema.ResourceD
existing, err := client.Get(ctx, resourceGroup, workspace.Name, name)
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
- return fmt.Errorf("checking for present of existing Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
+ return fmt.Errorf("checking for present of existing Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
}
}
if existing.ID != nil && *existing.ID != "" {
- return tf.ImportAsExistsError("azurerm_log_analytics_storage_insight_config", *existing.ID)
+ return tf.ImportAsExistsError("azurerm_log_analytics_storage_insights", *existing.ID)
}
}
@@ -141,28 +142,28 @@ func resourceArmLogAnalyticsStorageInsightConfigCreateUpdate(d *schema.ResourceD
}
if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.Name, name, parameters); err != nil {
- return fmt.Errorf("creating/updating Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
+ return fmt.Errorf("creating/updating Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
}
resp, err := client.Get(ctx, resourceGroup, workspace.Name, name)
if err != nil {
- return fmt.Errorf("retrieving Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
+ return fmt.Errorf("retrieving Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
}
if resp.ID == nil || *resp.ID == "" {
- return fmt.Errorf("empty or nil ID returned for Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q) ID", name, resourceGroup, workspace.Name)
+ return fmt.Errorf("empty or nil ID returned for Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q) ID", name, resourceGroup, workspace.Name)
}
d.SetId(*resp.ID)
- return resourceArmLogAnalyticsStorageInsightConfigRead(d, meta)
+ return resourceArmLogAnalyticsStorageInsightsRead(d, meta)
}
-func resourceArmLogAnalyticsStorageInsightConfigRead(d *schema.ResourceData, meta interface{}) error {
- client := meta.(*clients.Client).LogAnalytics.StorageInsightConfigClient
+func resourceArmLogAnalyticsStorageInsightsRead(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.StorageInsightsClient
ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
defer cancel()
- id, err := parse.LogAnalyticsStorageInsightConfigID(d.Id())
+ id, err := parse.LogAnalyticsStorageInsightsID(d.Id())
if err != nil {
return err
}
@@ -173,20 +174,20 @@ func resourceArmLogAnalyticsStorageInsightConfigRead(d *schema.ResourceData, met
resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name)
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
- log.Printf("[INFO] Log Analytics Storage Insight Config %q does not exist - removing from state", d.Id())
+ log.Printf("[INFO] Log Analytics Storage Insights %q does not exist - removing from state", d.Id())
d.SetId("")
return nil
}
- return fmt.Errorf("retrieving Log Analytics Storage Insight Config %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err)
+ return fmt.Errorf("retrieving Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err)
}
d.Set("name", id.Name)
d.Set("resource_group_name", id.ResourceGroup)
- d.Set("workspace_resource_id", id.WorkspaceID)
+ d.Set("workspace_id", id.WorkspaceID)
if props := resp.StorageInsightProperties; props != nil {
d.Set("blob_container_names", utils.FlattenStringSlice(props.Containers))
- d.Set("storage_account_resource_id", props.StorageAccount.ID)
+ d.Set("storage_account_id", props.StorageAccount.ID)
d.Set("storage_account_key", storageAccountKey)
d.Set("table_names", utils.FlattenStringSlice(props.Tables))
}
@@ -194,12 +195,12 @@ func resourceArmLogAnalyticsStorageInsightConfigRead(d *schema.ResourceData, met
return tags.FlattenAndSet(d, resp.Tags)
}
-func resourceArmLogAnalyticsStorageInsightConfigDelete(d *schema.ResourceData, meta interface{}) error {
- client := meta.(*clients.Client).LogAnalytics.StorageInsightConfigClient
+func resourceArmLogAnalyticsStorageInsightsDelete(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.StorageInsightsClient
ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
defer cancel()
- id, err := parse.LogAnalyticsStorageInsightConfigID(d.Id())
+ id, err := parse.LogAnalyticsStorageInsightsID(d.Id())
if err != nil {
return err
}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
similarity index 75%
rename from azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go
rename to azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
index 566ba96842ef..a8aa41f59494 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
@@ -6,27 +6,27 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
)
-type LogAnalyticsStorageInsightConfigId struct {
+type LogAnalyticsStorageInsightsId struct {
ResourceGroup string
WorkspaceName string
WorkspaceID string
Name string
}
-func LogAnalyticsStorageInsightConfigID(input string) (*LogAnalyticsStorageInsightConfigId, error) {
+func LogAnalyticsStorageInsightsID(input string) (*LogAnalyticsStorageInsightsId, error) {
id, err := azure.ParseAzureResourceID(input)
if err != nil {
- return nil, fmt.Errorf("parsing Log Analytics Storage Insight Config ID %q: %+v", input, err)
+ return nil, fmt.Errorf("parsing Log Analytics Storage Insights ID %q: %+v", input, err)
}
- logAnalyticsStorageInsightConfig := LogAnalyticsStorageInsightConfigId{
+ logAnalyticsStorageInsightConfig := LogAnalyticsStorageInsightsId{
ResourceGroup: id.ResourceGroup,
}
if logAnalyticsStorageInsightConfig.WorkspaceName, err = id.PopSegment("workspaces"); err != nil {
return nil, err
}
if logAnalyticsStorageInsightConfig.WorkspaceID = fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s", id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsStorageInsightConfig.WorkspaceName); err != nil {
- return nil, fmt.Errorf("formatting Log Analytics Storage Insight Config workspace ID %q", input)
+ return nil, fmt.Errorf("formatting Log Analytics Storage Insights workspace ID %q", input)
}
if logAnalyticsStorageInsightConfig.Name, err = id.PopSegment("storageInsightConfigs"); err != nil {
if logAnalyticsStorageInsightConfig.Name, err = id.PopSegment("storageinsightconfigs"); err != nil {
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights_test.go
similarity index 91%
rename from azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go
rename to azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights_test.go
index 5d5f821ba194..3525e6901478 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insight_config_test.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights_test.go
@@ -4,11 +4,11 @@ import (
"testing"
)
-func TestLogAnalyticsStorageInsightConfigID(t *testing.T) {
+func TestLogAnalyticsStorageInsightsID(t *testing.T) {
testData := []struct {
Name string
Input string
- Expected *LogAnalyticsStorageInsightConfigId
+ Expected *LogAnalyticsStorageInsightsId
}{
{
Name: "Empty",
@@ -38,7 +38,7 @@ func TestLogAnalyticsStorageInsightConfigID(t *testing.T) {
{
Name: "operationalinsights StorageInsightConfig ID",
Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1",
- Expected: &LogAnalyticsStorageInsightConfigId{
+ Expected: &LogAnalyticsStorageInsightsId{
ResourceGroup: "resourceGroup1",
WorkspaceName: "workspace1",
WorkspaceID: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1",
@@ -55,7 +55,7 @@ func TestLogAnalyticsStorageInsightConfigID(t *testing.T) {
for _, v := range testData {
t.Logf("[DEBUG] Testing %q..", v.Name)
- actual, err := LogAnalyticsStorageInsightConfigID(v.Input)
+ actual, err := LogAnalyticsStorageInsightsID(v.Input)
if err != nil {
if v.Expected == nil {
continue
diff --git a/azurerm/internal/services/loganalytics/registration.go b/azurerm/internal/services/loganalytics/registration.go
index b22121bf8d6d..15fa27d774f6 100644
--- a/azurerm/internal/services/loganalytics/registration.go
+++ b/azurerm/internal/services/loganalytics/registration.go
@@ -33,7 +33,7 @@ func (r Registration) SupportedResources() map[string]*schema.Resource {
"azurerm_log_analytics_solution": resourceArmLogAnalyticsSolution(),
"azurerm_log_analytics_workspace": resourceArmLogAnalyticsWorkspace(),
"azurerm_log_analytics_datasource_windows_event": resourceArmLogAnalyticsDataSourceWindowsEvent(),
- "azurerm_log_analytics_storage_insight_config": resourceArmLogAnalyticsStorageInsightConfig(),
+ "azurerm_log_analytics_storage_insights": resourceArmLogAnalyticsStorageInsights(),
"azurerm_log_analytics_datasource_windows_performance_counter": resourceArmLogAnalyticsDataSourceWindowsPerformanceCounter(),
}
}
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go
similarity index 53%
rename from azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
rename to azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go
index d353a562f5d3..7035870c3865 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insight_config_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go
@@ -12,17 +12,17 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)
-func TestAccAzureRMLogAnalyticsStorageInsightConfig_basic(t *testing.T) {
- data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+func TestAccAzureRMLogAnalyticsStorageInsights_basic(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightsDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data),
+ Config: testAccAzureRMLogAnalyticsStorageInsights_basic(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsStorageInsightsExists(data.ResourceName),
),
},
data.ImportStep("storage_account_key"), // key is not returned by the API
@@ -30,35 +30,35 @@ func TestAccAzureRMLogAnalyticsStorageInsightConfig_basic(t *testing.T) {
})
}
-func TestAccAzureRMLogAnalyticsStorageInsightConfig_requiresImport(t *testing.T) {
- data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+func TestAccAzureRMLogAnalyticsStorageInsights_requiresImport(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightsDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data),
+ Config: testAccAzureRMLogAnalyticsStorageInsights_basic(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsStorageInsightsExists(data.ResourceName),
),
},
- data.RequiresImportErrorStep(testAccAzureRMLogAnalyticsStorageInsightConfig_requiresImport),
+ data.RequiresImportErrorStep(testAccAzureRMLogAnalyticsStorageInsights_requiresImport),
},
})
}
-func TestAccAzureRMLogAnalyticsStorageInsightConfig_complete(t *testing.T) {
- data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+func TestAccAzureRMLogAnalyticsStorageInsights_complete(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightsDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data),
+ Config: testAccAzureRMLogAnalyticsStorageInsights_complete(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsStorageInsightsExists(data.ResourceName),
),
},
data.ImportStep("storage_account_key"), // key is not returned by the API
@@ -66,31 +66,31 @@ func TestAccAzureRMLogAnalyticsStorageInsightConfig_complete(t *testing.T) {
})
}
-func TestAccAzureRMLogAnalyticsStorageInsightConfig_update(t *testing.T) {
- data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+func TestAccAzureRMLogAnalyticsStorageInsights_update(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightsDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data),
+ Config: testAccAzureRMLogAnalyticsStorageInsights_basic(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsStorageInsightsExists(data.ResourceName),
),
},
data.ImportStep("storage_account_key"),
{
- Config: testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data),
+ Config: testAccAzureRMLogAnalyticsStorageInsights_complete(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsStorageInsightsExists(data.ResourceName),
),
},
data.ImportStep("storage_account_key"),
{
- Config: testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data),
+ Config: testAccAzureRMLogAnalyticsStorageInsights_basic(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsStorageInsightsExists(data.ResourceName),
),
},
data.ImportStep("storage_account_key"), // key is not returned by the API
@@ -98,24 +98,24 @@ func TestAccAzureRMLogAnalyticsStorageInsightConfig_update(t *testing.T) {
})
}
-func TestAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(t *testing.T) {
- data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insight_config", "test")
+func TestAccAzureRMLogAnalyticsStorageInsights_updateStorageAccount(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightsDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data),
+ Config: testAccAzureRMLogAnalyticsStorageInsights_complete(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsStorageInsightsExists(data.ResourceName),
),
},
data.ImportStep("storage_account_key"),
{
- Config: testAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(data),
+ Config: testAccAzureRMLogAnalyticsStorageInsights_updateStorageAccount(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsStorageInsightConfigExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsStorageInsightsExists(data.ResourceName),
),
},
data.ImportStep("storage_account_key"), // key is not returned by the API
@@ -123,21 +123,21 @@ func TestAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(t *test
})
}
-func testCheckAzureRMLogAnalyticsStorageInsightConfigExists(resourceName string) resource.TestCheckFunc {
+func testCheckAzureRMLogAnalyticsStorageInsightsExists(resourceName string) resource.TestCheckFunc {
return func(s *terraform.State) error {
- client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.StorageInsightConfigClient
+ client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.StorageInsightsClient
ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
rs, ok := s.RootModule().Resources[resourceName]
if !ok {
- return fmt.Errorf("Log Analytics Storage Insight Config not found: %s", resourceName)
+ return fmt.Errorf("Log Analytics Storage Insights not found: %s", resourceName)
}
- id, err := parse.LogAnalyticsStorageInsightConfigID(rs.Primary.ID)
+ id, err := parse.LogAnalyticsStorageInsightsID(rs.Primary.ID)
if err != nil {
return err
}
if resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name); err != nil {
if !utils.ResponseWasNotFound(resp.Response) {
- return fmt.Errorf("bad: Log Analytics Storage Insight Config %q does not exist", id.Name)
+ return fmt.Errorf("bad: Log Analytics Storage Insights %q does not exist", id.Name)
}
return fmt.Errorf("bad: Get on LogAnalytics.StorageInsightConfigClient: %+v", err)
}
@@ -145,15 +145,15 @@ func testCheckAzureRMLogAnalyticsStorageInsightConfigExists(resourceName string)
}
}
-func testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy(s *terraform.State) error {
- client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.StorageInsightConfigClient
+func testCheckAzureRMLogAnalyticsStorageInsightsDestroy(s *terraform.State) error {
+ client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.StorageInsightsClient
ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
for _, rs := range s.RootModule().Resources {
- if rs.Type != "azurerm_log_analytics_storage_insight_config" {
+ if rs.Type != "azurerm_log_analytics_storage_insights" {
continue
}
- id, err := parse.LogAnalyticsStorageInsightConfigID(rs.Primary.ID)
+ id, err := parse.LogAnalyticsStorageInsightsID(rs.Primary.ID)
if err != nil {
return err
}
@@ -167,7 +167,7 @@ func testCheckAzureRMLogAnalyticsStorageInsightConfigDestroy(s *terraform.State)
return nil
}
-func testAccAzureRMLogAnalyticsStorageInsightConfig_template(data acceptance.TestData) string {
+func testAccAzureRMLogAnalyticsStorageInsights_template(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
@@ -196,59 +196,59 @@ resource "azurerm_storage_account" "test" {
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomString)
}
-func testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data acceptance.TestData) string {
- template := testAccAzureRMLogAnalyticsStorageInsightConfig_template(data)
+func testAccAzureRMLogAnalyticsStorageInsights_basic(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsStorageInsights_template(data)
return fmt.Sprintf(`
%s
-resource "azurerm_log_analytics_storage_insight_config" "test" {
+resource "azurerm_log_analytics_storage_insights" "test" {
name = "acctest-la-%d"
resource_group_name = azurerm_resource_group.test.name
- workspace_resource_id = azurerm_log_analytics_workspace.test.id
+ workspace_id = azurerm_log_analytics_workspace.test.id
- storage_account_resource_id = azurerm_storage_account.test.id
- storage_account_key = azurerm_storage_account.test.primary_access_key
+ storage_account_id = azurerm_storage_account.test.id
+ storage_account_key = azurerm_storage_account.test.primary_access_key
}
`, template, data.RandomInteger)
}
-func testAccAzureRMLogAnalyticsStorageInsightConfig_requiresImport(data acceptance.TestData) string {
- config := testAccAzureRMLogAnalyticsStorageInsightConfig_basic(data)
+func testAccAzureRMLogAnalyticsStorageInsights_requiresImport(data acceptance.TestData) string {
+ config := testAccAzureRMLogAnalyticsStorageInsights_basic(data)
return fmt.Sprintf(`
%s
-resource "azurerm_log_analytics_storage_insight_config" "import" {
- name = azurerm_log_analytics_storage_insight_config.test.name
- resource_group_name = azurerm_log_analytics_storage_insight_config.test.resource_group_name
- workspace_resource_id = azurerm_log_analytics_storage_insight_config.test.workspace_resource_id
+resource "azurerm_log_analytics_storage_insights" "import" {
+ name = azurerm_log_analytics_storage_insights.test.name
+ resource_group_name = azurerm_log_analytics_storage_insights.test.resource_group_name
+ workspace_id = azurerm_log_analytics_storage_insights.test.workspace_id
- storage_account_resource_id = azurerm_storage_account.test.id
- storage_account_key = azurerm_storage_account.test.primary_access_key
+ storage_account_id = azurerm_storage_account.test.id
+ storage_account_key = azurerm_storage_account.test.primary_access_key
}
`, config)
}
-func testAccAzureRMLogAnalyticsStorageInsightConfig_complete(data acceptance.TestData) string {
- template := testAccAzureRMLogAnalyticsStorageInsightConfig_template(data)
+func testAccAzureRMLogAnalyticsStorageInsights_complete(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsStorageInsights_template(data)
return fmt.Sprintf(`
%s
-resource "azurerm_log_analytics_storage_insight_config" "test" {
+resource "azurerm_log_analytics_storage_insights" "test" {
name = "acctest-LA-%d"
resource_group_name = azurerm_resource_group.test.name
- workspace_resource_id = azurerm_log_analytics_workspace.test.id
+ workspace_id = azurerm_log_analytics_workspace.test.id
blob_container_names = ["wad-iis-logfiles"]
table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
- storage_account_resource_id = azurerm_storage_account.test.id
- storage_account_key = azurerm_storage_account.test.primary_access_key
+ storage_account_id = azurerm_storage_account.test.id
+ storage_account_key = azurerm_storage_account.test.primary_access_key
}
`, template, data.RandomInteger)
}
-func testAccAzureRMLogAnalyticsStorageInsightConfig_updateStorageAccount(data acceptance.TestData) string {
- template := testAccAzureRMLogAnalyticsStorageInsightConfig_template(data)
+func testAccAzureRMLogAnalyticsStorageInsights_updateStorageAccount(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsStorageInsights_template(data)
return fmt.Sprintf(`
%s
@@ -261,16 +261,16 @@ resource "azurerm_storage_account" "test2" {
account_replication_type = "LRS"
}
-resource "azurerm_log_analytics_storage_insight_config" "test" {
+resource "azurerm_log_analytics_storage_insights" "test" {
name = "acctest-la-%d"
resource_group_name = azurerm_resource_group.test.name
- workspace_resource_id = azurerm_log_analytics_workspace.test.id
+ workspace_id = azurerm_log_analytics_workspace.test.id
blob_container_names = ["wad-iis-logfiles"]
table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
- storage_account_resource_id = azurerm_storage_account.test2.id
- storage_account_key = azurerm_storage_account.test2.primary_access_key
+ storage_account_id = azurerm_storage_account.test2.id
+ storage_account_key = azurerm_storage_account.test2.primary_access_key
}
`, template, data.RandomStringOfLength(6), data.RandomInteger)
}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go
deleted file mode 100644
index 4971bf785534..000000000000
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package validate
-
-func LogAnalyticsStorageInsightConfigName(i interface{}, k string) (warnings []string, errors []error) {
- return LogAnalyticsGenericName(i, k)
-}
-
-func LogAnalyticsStorageInsightConfigWorkspaceName(i interface{}, k string) (warnings []string, errors []error) {
- return LogAnalyticsGenericName(i, k)
-}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights.go
new file mode 100644
index 000000000000..2d7e24ace0b6
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights.go
@@ -0,0 +1,9 @@
+package validate
+
+func LogAnalyticsStorageInsightsName(i interface{}, k string) (warnings []string, errors []error) {
+ return LogAnalyticsGenericName(i, k)
+}
+
+func LogAnalyticsStorageInsightsWorkspaceName(i interface{}, k string) (warnings []string, errors []error) {
+ return LogAnalyticsGenericName(i, k)
+}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_test.go
similarity index 90%
rename from azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go
rename to azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_test.go
index 5e0029d9652b..aaffe53bcc8b 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insight_config_test.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights_test.go
@@ -4,7 +4,7 @@ import (
"testing"
)
-func TestLogAnalyticsStorageInsightConfigWorkspaceName(t *testing.T) {
+func TestLogAnalyticsStorageInsightsWorkspaceName(t *testing.T) {
testCases := []struct {
Name string
Input string
@@ -64,7 +64,7 @@ func TestLogAnalyticsStorageInsightConfigWorkspaceName(t *testing.T) {
for _, v := range testCases {
t.Logf("[DEBUG] Testing %q..", v.Name)
- _, errors := LogAnalyticsStorageInsightConfigWorkspaceName(v.Input, "workspace_name")
+ _, errors := LogAnalyticsStorageInsightsWorkspaceName(v.Input, "workspace_name")
result := len(errors) == 0
if result != v.Expected {
t.Fatalf("Expected the result to be %v but got %v (and %d errors)", v.Expected, result, len(errors))
diff --git a/website/azurerm.erb b/website/azurerm.erb
index 3dde6ac0e594..bd2e9506b707 100644
--- a/website/azurerm.erb
+++ b/website/azurerm.erb
@@ -2024,7 +2024,7 @@
- azurerm_log_analytics_storage_insight_config
+ azurerm_log_analytics_storage_insights
diff --git a/website/docs/r/log_analytics_storage_insight_config.html.markdown b/website/docs/r/log_analytics_storage_insights.html.markdown
similarity index 55%
rename from website/docs/r/log_analytics_storage_insight_config.html.markdown
rename to website/docs/r/log_analytics_storage_insights.html.markdown
index 8e86b241d23d..b1a86ddc03ff 100644
--- a/website/docs/r/log_analytics_storage_insight_config.html.markdown
+++ b/website/docs/r/log_analytics_storage_insights.html.markdown
@@ -1,14 +1,14 @@
---
subcategory: "Log Analytics"
layout: "azurerm"
-page_title: "Azure Resource Manager: azurerm_log_analytics_storage_insight_config"
+page_title: "Azure Resource Manager: azurerm_log_analytics_storage_insights"
description: |-
- Manages a Log Analytics Storage Insight Config.
+ Manages a Log Analytics Storage Insights resource.
---
-# azurerm_log_analytics_storage_insight_config
+# azurerm_log_analytics_storage_insights
-Manages a Log Analytics Storage Insight Config.
+Manages a Log Analytics Storage Insights resource.
## Example Usage
@@ -34,13 +34,13 @@ resource "azurerm_storage_account" "example" {
account_replication_type = "LRS"
}
-resource "azurerm_log_analytics_storage_insight_config" "example" {
- name = "example-storageinsightconfig"
- resource_group_name = azurerm_resource_group.example.name
- workspace_resource_id = azurerm_log_analytics_workspace.example.id
+resource "azurerm_log_analytics_storage_insights" "example" {
+ name = "example-storageinsightconfig"
+ resource_group_name = azurerm_resource_group.example.name
+ workspace_id = azurerm_log_analytics_workspace.example.id
- storage_account_resource_id = azurerm_storage_account.example.id
- storage_account_key = azurerm_storage_account.example.primary_access_key
+ storage_account_id = azurerm_storage_account.example.id
+ storage_account_key = azurerm_storage_account.example.primary_access_key
}
```
@@ -48,13 +48,13 @@ resource "azurerm_log_analytics_storage_insight_config" "example" {
The following arguments are supported:
-* `name` - (Required) The name which should be used for this Log Analytics Storage Insight Config. Changing this forces a new Log Analytics Storage Insight Config to be created.
+* `name` - (Required) The name which should be used for this Log Analytics Storage Insights. Changing this forces a new Log Analytics Storage Insights to be created.
-* `resource_group_name` - (Required) The name of the Resource Group where the Log Analytics Storage Insight Config should exist. Changing this forces a new Log Analytics Storage Insight Config to be created.
+* `resource_group_name` - (Required) The name of the Resource Group where the Log Analytics Storage Insights should exist. Changing this forces a new Log Analytics Storage Insights to be created.
-* `workspace_resource_id` - (Required) The resource ID of the workspace to create the Log Analytics Storage Insight Config within. Changing this forces a new Log Analytics Storage Insight Config to be created.
+* `workspace_id` - (Required) The resource ID of the workspace to create the Log Analytics Storage Insights within. Changing this forces a new Log Analytics Storage Insights to be created.
-* `storage_account_resource_id` - (Required) The resource ID of the storage account to be used by this Log Analytics Storage Insight Config.
+* `storage_account_id` - (Required) The resource ID of the storage account to be used by this Log Analytics Storage Insights.
* `storage_account_key` - (Required) The storage access key to be used to connect to the storage account.
@@ -62,27 +62,27 @@ The following arguments are supported:
* `table_names` - (Optional) The names of the Azure tables that the workspace should read.
-* `tags` - (Optional) A mapping of tags which should be assigned to the Log Analytics Storage Insight Config.
+* `tags` - (Optional) A mapping of tags which should be assigned to the Log Analytics Storage Insights.
## Attributes Reference
In addition to the Arguments listed above - the following Attributes are exported:
-* `id` - The ID of the Log Analytics Storage Insight Config.
+* `id` - The ID of the Log Analytics Storage Insights.
## Timeouts
The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
-* `create` - (Defaults to 30 minutes) Used when creating the Log Analytics Storage Insight Config.
-* `read` - (Defaults to 5 minutes) Used when retrieving the Log Analytics Storage Insight Config.
-* `update` - (Defaults to 30 minutes) Used when updating the Log Analytics Storage Insight Config.
-* `delete` - (Defaults to 30 minutes) Used when deleting the Log Analytics Storage Insight Config.
+* `create` - (Defaults to 30 minutes) Used when creating the Log Analytics Storage Insights.
+* `read` - (Defaults to 5 minutes) Used when retrieving the Log Analytics Storage Insights.
+* `update` - (Defaults to 30 minutes) Used when updating the Log Analytics Storage Insights.
+* `delete` - (Defaults to 30 minutes) Used when deleting the Log Analytics Storage Insights.
## Import
Log Analytics Storage Insight Configs can be imported using the `resource id`, e.g.
```shell
-terraform import azurerm_log_analytics_storage_insight_config.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1
+terraform import azurerm_log_analytics_storage_insights.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1
```
\ No newline at end of file
From 68676d735acbc014dd9afc477382d49878fd46e4 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 4 Nov 2020 22:40:44 -0800
Subject: [PATCH 10/46] Fix test lint errors
---
...nalytics_storage_insights_resource_test.go | 28 +++++++++----------
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go
index 7035870c3865..1aafdcd6195e 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go
@@ -139,7 +139,7 @@ func testCheckAzureRMLogAnalyticsStorageInsightsExists(resourceName string) reso
if !utils.ResponseWasNotFound(resp.Response) {
return fmt.Errorf("bad: Log Analytics Storage Insights %q does not exist", id.Name)
}
- return fmt.Errorf("bad: Get on LogAnalytics.StorageInsightConfigClient: %+v", err)
+ return fmt.Errorf("bad: Get on LogAnalytics.StorageInsightsClient: %+v", err)
}
return nil
}
@@ -159,7 +159,7 @@ func testCheckAzureRMLogAnalyticsStorageInsightsDestroy(s *terraform.State) erro
}
if resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name); err != nil {
if !utils.ResponseWasNotFound(resp.Response) {
- return fmt.Errorf("bad: Get on LogAnalytics.StorageInsightConfigClient: %+v", err)
+ return fmt.Errorf("bad: Get on LogAnalytics.StorageInsightsClient: %+v", err)
}
}
return nil
@@ -202,9 +202,9 @@ func testAccAzureRMLogAnalyticsStorageInsights_basic(data acceptance.TestData) s
%s
resource "azurerm_log_analytics_storage_insights" "test" {
- name = "acctest-la-%d"
- resource_group_name = azurerm_resource_group.test.name
- workspace_id = azurerm_log_analytics_workspace.test.id
+ name = "acctest-la-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_id = azurerm_log_analytics_workspace.test.id
storage_account_id = azurerm_storage_account.test.id
storage_account_key = azurerm_storage_account.test.primary_access_key
@@ -218,9 +218,9 @@ func testAccAzureRMLogAnalyticsStorageInsights_requiresImport(data acceptance.Te
%s
resource "azurerm_log_analytics_storage_insights" "import" {
- name = azurerm_log_analytics_storage_insights.test.name
- resource_group_name = azurerm_log_analytics_storage_insights.test.resource_group_name
- workspace_id = azurerm_log_analytics_storage_insights.test.workspace_id
+ name = azurerm_log_analytics_storage_insights.test.name
+ resource_group_name = azurerm_log_analytics_storage_insights.test.resource_group_name
+ workspace_id = azurerm_log_analytics_storage_insights.test.workspace_id
storage_account_id = azurerm_storage_account.test.id
storage_account_key = azurerm_storage_account.test.primary_access_key
@@ -234,9 +234,9 @@ func testAccAzureRMLogAnalyticsStorageInsights_complete(data acceptance.TestData
%s
resource "azurerm_log_analytics_storage_insights" "test" {
- name = "acctest-LA-%d"
- resource_group_name = azurerm_resource_group.test.name
- workspace_id = azurerm_log_analytics_workspace.test.id
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_id = azurerm_log_analytics_workspace.test.id
blob_container_names = ["wad-iis-logfiles"]
table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
@@ -262,9 +262,9 @@ resource "azurerm_storage_account" "test2" {
}
resource "azurerm_log_analytics_storage_insights" "test" {
- name = "acctest-la-%d"
- resource_group_name = azurerm_resource_group.test.name
- workspace_id = azurerm_log_analytics_workspace.test.id
+ name = "acctest-la-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_id = azurerm_log_analytics_workspace.test.id
blob_container_names = ["wad-iis-logfiles"]
table_names = ["WADWindowsEventLogsTable", "LinuxSyslogVer2v0"]
From 248d0001c4e275f41525cea9c6fa7a08c14f531b Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Thu, 5 Nov 2020 08:14:08 +0000
Subject: [PATCH 11/46] rename var in parse to match resource
---
.../parse/log_analytics_storage_insights.go | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
index a8aa41f59494..de35f9f0260d 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
@@ -19,17 +19,17 @@ func LogAnalyticsStorageInsightsID(input string) (*LogAnalyticsStorageInsightsId
return nil, fmt.Errorf("parsing Log Analytics Storage Insights ID %q: %+v", input, err)
}
- logAnalyticsStorageInsightConfig := LogAnalyticsStorageInsightsId{
+ logAnalyticsStorageInsight := LogAnalyticsStorageInsightsId{
ResourceGroup: id.ResourceGroup,
}
- if logAnalyticsStorageInsightConfig.WorkspaceName, err = id.PopSegment("workspaces"); err != nil {
+ if logAnalyticsStorageInsight.WorkspaceName, err = id.PopSegment("workspaces"); err != nil {
return nil, err
}
- if logAnalyticsStorageInsightConfig.WorkspaceID = fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s", id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsStorageInsightConfig.WorkspaceName); err != nil {
+ if logAnalyticsStorageInsight.WorkspaceID = fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s", id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsStorageInsight.WorkspaceName); err != nil {
return nil, fmt.Errorf("formatting Log Analytics Storage Insights workspace ID %q", input)
}
- if logAnalyticsStorageInsightConfig.Name, err = id.PopSegment("storageInsightConfigs"); err != nil {
- if logAnalyticsStorageInsightConfig.Name, err = id.PopSegment("storageinsightconfigs"); err != nil {
+ if logAnalyticsStorageInsight.Name, err = id.PopSegment("storageInsightConfigs"); err != nil {
+ if logAnalyticsStorageInsight.Name, err = id.PopSegment("storageinsightconfigs"); err != nil {
return nil, err
}
}
@@ -37,5 +37,5 @@ func LogAnalyticsStorageInsightsID(input string) (*LogAnalyticsStorageInsightsId
return nil, err
}
- return &logAnalyticsStorageInsightConfig, nil
+ return &logAnalyticsStorageInsight, nil
}
From 7bca8cd94ec81638d84c24dc3bc118fa841cb39d Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Thu, 5 Nov 2020 08:22:52 +0000
Subject: [PATCH 12/46] use storage account id validation from storage package
---
.../loganalytics/log_analytics_storage_insights_resource.go | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
index 3d1c560d6a5d..80ca69e69986 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
@@ -15,6 +15,7 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
+ storageValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/validate"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags"
azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
@@ -63,7 +64,7 @@ func resourceArmLogAnalyticsStorageInsights() *schema.Resource {
"storage_account_id": {
Type: schema.TypeString,
Required: true,
- ValidateFunc: azure.ValidateResourceID,
+ ValidateFunc: storageValidate.StorageAccountID,
},
"storage_account_key": {
From 491114891b047406983570bb402dc73ca542521b Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Thu, 5 Nov 2020 10:55:04 +0000
Subject: [PATCH 13/46] update to new ID pattern
---
...log_analytics_storage_insights_resource.go | 37 +++++++------------
.../parse/log_analytics_storage_insights.go | 22 ++++++++++-
2 files changed, 35 insertions(+), 24 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
index 80ca69e69986..13c5f70b777a 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
@@ -58,7 +58,7 @@ func resourceArmLogAnalyticsStorageInsights() *schema.Resource {
Required: true,
ForceNew: true,
DiffSuppressFunc: suppress.CaseDifference,
- ValidateFunc: azure.ValidateResourceID,
+ ValidateFunc: validate.LogAnalyticsWorkspaceID,
},
"storage_account_id": {
@@ -68,10 +68,11 @@ func resourceArmLogAnalyticsStorageInsights() *schema.Resource {
},
"storage_account_key": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- Sensitive: true,
+ Type: schema.TypeString,
+ Optional: true,
+ Computed: true,
+ Sensitive: true,
+ // TODO -
ValidateFunc: validate.IsBase64Encoded,
},
@@ -99,6 +100,7 @@ func resourceArmLogAnalyticsStorageInsights() *schema.Resource {
}
func resourceArmLogAnalyticsStorageInsightsCreateUpdate(d *schema.ResourceData, meta interface{}) error {
client := meta.(*clients.Client).LogAnalytics.StorageInsightsClient
+ subscriptionId := meta.(*clients.Client).Account.SubscriptionId
ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
defer cancel()
@@ -110,16 +112,14 @@ func resourceArmLogAnalyticsStorageInsightsCreateUpdate(d *schema.ResourceData,
return fmt.Errorf("The argument 'storage_account_key' is required, but no definition was found.")
}
- workspace, err := parse.LogAnalyticsWorkspaceID(d.Get("workspace_id").(string))
- if err != nil {
- return err
- }
+ workspaceId := d.Get("workspace_id").(string)
+ id := parse.NewLogAnalyticsStorageInsightsId(resourceGroup, workspaceId, name)
if d.IsNewResource() {
- existing, err := client.Get(ctx, resourceGroup, workspace.Name, name)
+ existing, err := client.Get(ctx, resourceGroup, id.WorkspaceName, name)
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
- return fmt.Errorf("checking for present of existing Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
+ return fmt.Errorf("checking for presence of existing Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, id.WorkspaceName, err)
}
}
if existing.ID != nil && *existing.ID != "" {
@@ -142,20 +142,11 @@ func resourceArmLogAnalyticsStorageInsightsCreateUpdate(d *schema.ResourceData,
parameters.StorageInsightProperties.Containers = utils.ExpandStringSlice(d.Get("blob_container_names").(*schema.Set).List())
}
- if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.Name, name, parameters); err != nil {
- return fmt.Errorf("creating/updating Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
- }
-
- resp, err := client.Get(ctx, resourceGroup, workspace.Name, name)
- if err != nil {
- return fmt.Errorf("retrieving Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
- }
-
- if resp.ID == nil || *resp.ID == "" {
- return fmt.Errorf("empty or nil ID returned for Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q) ID", name, resourceGroup, workspace.Name)
+ if _, err := client.CreateOrUpdate(ctx, resourceGroup, id.WorkspaceName, name, parameters); err != nil {
+ return fmt.Errorf("creating/updating Log Analytics Storage Insights %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, id.WorkspaceName, err)
}
- d.SetId(*resp.ID)
+ d.SetId(id.ID(subscriptionId))
return resourceArmLogAnalyticsStorageInsightsRead(d, meta)
}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
index de35f9f0260d..c83b6aceb5d3 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
@@ -13,6 +13,26 @@ type LogAnalyticsStorageInsightsId struct {
Name string
}
+// Note - this API currently lower-cases all return values
+// Issue tracked here - https://github.com/Azure/azure-sdk-for-go/issues/13268
+const fmtWorkspaceId = "/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s"
+
+func (id LogAnalyticsStorageInsightsId) ID(subscriptionId string) string {
+ fmtString := "/subscriptions/%s/resourcegroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s/storageInsightConfigs/%s"
+ return fmt.Sprintf(fmtString, subscriptionId, id.ResourceGroup, id.WorkspaceName, id.Name)
+}
+
+func NewLogAnalyticsStorageInsightsId(resourceGroup, workspaceId, name string) LogAnalyticsStorageInsightsId {
+ // (@jackofallops) ignoring error here as already passed through validation in schema
+ workspace, _ := LogAnalyticsWorkspaceID(workspaceId)
+ return LogAnalyticsStorageInsightsId{
+ ResourceGroup: resourceGroup,
+ WorkspaceName: workspace.Name,
+ WorkspaceID: workspaceId,
+ Name: name,
+ }
+}
+
func LogAnalyticsStorageInsightsID(input string) (*LogAnalyticsStorageInsightsId, error) {
id, err := azure.ParseAzureResourceID(input)
if err != nil {
@@ -25,7 +45,7 @@ func LogAnalyticsStorageInsightsID(input string) (*LogAnalyticsStorageInsightsId
if logAnalyticsStorageInsight.WorkspaceName, err = id.PopSegment("workspaces"); err != nil {
return nil, err
}
- if logAnalyticsStorageInsight.WorkspaceID = fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s", id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsStorageInsight.WorkspaceName); err != nil {
+ if logAnalyticsStorageInsight.WorkspaceID = fmt.Sprintf(fmtWorkspaceId, id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsStorageInsight.WorkspaceName); err != nil {
return nil, fmt.Errorf("formatting Log Analytics Storage Insights workspace ID %q", input)
}
if logAnalyticsStorageInsight.Name, err = id.PopSegment("storageInsightConfigs"); err != nil {
From d2a1f3d960436428a0ae67a6be24e710e0e1ebfb Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Thu, 5 Nov 2020 10:55:27 +0000
Subject: [PATCH 14/46] terrafmt log analytics docs
---
.../docs/r/log_analytics_linked_storage_account.html.markdown | 2 +-
website/docs/r/log_analytics_storage_insights.html.markdown | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/website/docs/r/log_analytics_linked_storage_account.html.markdown b/website/docs/r/log_analytics_linked_storage_account.html.markdown
index d300f4ca39e6..87b232226739 100644
--- a/website/docs/r/log_analytics_linked_storage_account.html.markdown
+++ b/website/docs/r/log_analytics_linked_storage_account.html.markdown
@@ -74,4 +74,4 @@ Log Analytics Linked Storage Accounts can be imported using the `resource id`, e
```shell
terraform import azurerm_log_analytics_linked_storage_account.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/linkedStorageAccounts/{dataSourceType}
-```
\ No newline at end of file
+```
diff --git a/website/docs/r/log_analytics_storage_insights.html.markdown b/website/docs/r/log_analytics_storage_insights.html.markdown
index b1a86ddc03ff..ec6b4c371f9d 100644
--- a/website/docs/r/log_analytics_storage_insights.html.markdown
+++ b/website/docs/r/log_analytics_storage_insights.html.markdown
@@ -85,4 +85,4 @@ Log Analytics Storage Insight Configs can be imported using the `resource id`, e
```shell
terraform import azurerm_log_analytics_storage_insights.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1
-```
\ No newline at end of file
+```
From 3ba8c3af59d9910be73e98c8b647c04fd501d109 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Thu, 5 Nov 2020 12:19:33 +0000
Subject: [PATCH 15/46] review updates and fixes
---
azurerm/internal/acceptance/testcase.go | 2 +-
...log_analytics_storage_insights_resource.go | 34 +++++++++----------
.../parse/log_analytics_storage_insights.go | 10 ++----
.../validate/log_analytics_data_export.go | 4 +--
.../log_analytics_linked_storage_account.go | 26 --------------
.../validate/log_analytics_name.go | 13 +++----
.../log_analytics_storage_insights.go | 4 +--
...g_analytics_storage_insights.html.markdown | 6 ++--
8 files changed, 32 insertions(+), 67 deletions(-)
diff --git a/azurerm/internal/acceptance/testcase.go b/azurerm/internal/acceptance/testcase.go
index bc5a83c823cd..e103aa3fa019 100644
--- a/azurerm/internal/acceptance/testcase.go
+++ b/azurerm/internal/acceptance/testcase.go
@@ -15,10 +15,10 @@ import (
// fixing when we move to Binary Testing so that we can test across provider instances
var enableBinaryTesting = false
+// lintignore:AT001
func (td TestData) DataSourceTest(t *testing.T, steps []resource.TestStep) {
// DataSources don't need a check destroy - however since this is a wrapper function
// and not matching the ignore pattern `XXX_data_source_test.go`, this needs to be explicitly opted out
- //lintignore:AT001
testCase := resource.TestCase{
PreCheck: func() { PreCheck(t) },
Steps: steps,
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
index 13c5f70b777a..f112da19ee7d 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
@@ -3,14 +3,12 @@ package loganalytics
import (
"fmt"
"log"
- "strings"
"time"
"github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
- "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
@@ -51,14 +49,13 @@ func resourceArmLogAnalyticsStorageInsights() *schema.Resource {
// must ignore case since API lowercases all returned data
// IssueP https://github.com/Azure/azure-sdk-for-go/issues/13268
- "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(),
+ "resource_group_name": azure.SchemaResourceGroupName(),
"workspace_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- DiffSuppressFunc: suppress.CaseDifference,
- ValidateFunc: validate.LogAnalyticsWorkspaceID,
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ ValidateFunc: validate.LogAnalyticsWorkspaceID,
},
"storage_account_id": {
@@ -69,18 +66,20 @@ func resourceArmLogAnalyticsStorageInsights() *schema.Resource {
"storage_account_key": {
Type: schema.TypeString,
- Optional: true,
- Computed: true,
+ Required: true,
Sensitive: true,
- // TODO -
- ValidateFunc: validate.IsBase64Encoded,
+ ValidateFunc: validation.All(
+ validation.StringIsNotEmpty,
+ validate.IsBase64Encoded,
+ ),
},
"blob_container_names": {
Type: schema.TypeSet,
Optional: true,
Elem: &schema.Schema{
- Type: schema.TypeString,
+ Type: schema.TypeString,
+ ValidateFunc: validation.NoZeroValues,
},
},
@@ -108,9 +107,6 @@ func resourceArmLogAnalyticsStorageInsightsCreateUpdate(d *schema.ResourceData,
resourceGroup := d.Get("resource_group_name").(string)
storageAccountId := d.Get("storage_account_id").(string)
storageAccountKey := d.Get("storage_account_key").(string)
- if len(strings.TrimSpace(storageAccountKey)) < 1 {
- return fmt.Errorf("The argument 'storage_account_key' is required, but no definition was found.")
- }
workspaceId := d.Get("workspace_id").(string)
id := parse.NewLogAnalyticsStorageInsightsId(resourceGroup, workspaceId, name)
@@ -179,7 +175,11 @@ func resourceArmLogAnalyticsStorageInsightsRead(d *schema.ResourceData, meta int
if props := resp.StorageInsightProperties; props != nil {
d.Set("blob_container_names", utils.FlattenStringSlice(props.Containers))
- d.Set("storage_account_id", props.StorageAccount.ID)
+ storageAccountId := ""
+ if props.StorageAccount != nil && props.StorageAccount.ID != nil {
+ storageAccountId = *props.StorageAccount.ID
+ }
+ d.Set("storage_account_id", storageAccountId)
d.Set("storage_account_key", storageAccountKey)
d.Set("table_names", utils.FlattenStringSlice(props.Tables))
}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
index c83b6aceb5d3..38149a434e01 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
@@ -13,12 +13,8 @@ type LogAnalyticsStorageInsightsId struct {
Name string
}
-// Note - this API currently lower-cases all return values
-// Issue tracked here - https://github.com/Azure/azure-sdk-for-go/issues/13268
-const fmtWorkspaceId = "/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s"
-
func (id LogAnalyticsStorageInsightsId) ID(subscriptionId string) string {
- fmtString := "/subscriptions/%s/resourcegroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s/storageInsightConfigs/%s"
+ fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/workspaces/%s/storageInsightConfigs/%s"
return fmt.Sprintf(fmtString, subscriptionId, id.ResourceGroup, id.WorkspaceName, id.Name)
}
@@ -45,9 +41,7 @@ func LogAnalyticsStorageInsightsID(input string) (*LogAnalyticsStorageInsightsId
if logAnalyticsStorageInsight.WorkspaceName, err = id.PopSegment("workspaces"); err != nil {
return nil, err
}
- if logAnalyticsStorageInsight.WorkspaceID = fmt.Sprintf(fmtWorkspaceId, id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsStorageInsight.WorkspaceName); err != nil {
- return nil, fmt.Errorf("formatting Log Analytics Storage Insights workspace ID %q", input)
- }
+ logAnalyticsStorageInsight.WorkspaceID = NewLogAnalyticsWorkspaceID(logAnalyticsStorageInsight.WorkspaceName, id.ResourceGroup).ID(id.SubscriptionID)
if logAnalyticsStorageInsight.Name, err = id.PopSegment("storageInsightConfigs"); err != nil {
if logAnalyticsStorageInsight.Name, err = id.PopSegment("storageinsightconfigs"); err != nil {
return nil, err
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go
index 80b4f6ca7051..020c71360840 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_data_export.go
@@ -1,9 +1,9 @@
package validate
func LogAnalyticsDataExportWorkspaceName(i interface{}, k string) (warnings []string, errors []error) {
- return LogAnalyticsGenericName(i, k)
+ return logAnalyticsGenericName(i, k)
}
func LogAnalyticsDataExportName(i interface{}, k string) (warnings []string, errors []error) {
- return LogAnalyticsGenericName(i, k)
+ return logAnalyticsGenericName(i, k)
}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account.go b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account.go
index 35b6f7a017f7..e636771910fd 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_linked_storage_account.go
@@ -1,10 +1,5 @@
package validate
-import (
- "fmt"
- "regexp"
-)
-
func LogAnalyticsLinkedStorageAccountWorkspaceName(i interface{}, k string) (warnings []string, errors []error) {
return logAnalyticsGenericName(i, k)
}
@@ -12,24 +7,3 @@ func LogAnalyticsLinkedStorageAccountWorkspaceName(i interface{}, k string) (war
func LogAnalyticsLinkedStorageAccountName(i interface{}, k string) (warnings []string, errors []error) {
return logAnalyticsGenericName(i, k)
}
-
-func logAnalyticsGenericName(i interface{}, k string) (warnings []string, errors []error) {
- v, ok := i.(string)
- if !ok {
- errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
- return
- }
- if len(v) < 4 {
- errors = append(errors, fmt.Errorf("length should be greater than %d", 4))
- return
- }
- if len(v) > 63 {
- errors = append(errors, fmt.Errorf("length should be less than %d", 63))
- return
- }
- if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
- errors = append(errors, fmt.Errorf("expected value of %s does not match regular expression, got %v", k, v))
- return
- }
- return
-}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_name.go b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
index cf5937cd43d0..390aaa86200a 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
@@ -7,25 +7,22 @@ import (
"strings"
)
-func LogAnalyticsGenericName(i interface{}, k string) (warnings []string, errors []error) {
+func logAnalyticsGenericName(i interface{}, k string) (warnings []string, errors []error) {
v, ok := i.(string)
if !ok {
- errors = append(errors, fmt.Errorf("expected %q to be a string", k))
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
return
}
-
if len(v) < 4 {
- errors = append(errors, fmt.Errorf("%q length should be greater than or equal to %d characters in length", k, 4))
+ errors = append(errors, fmt.Errorf("length should be greater than %d", 4))
return
}
-
if len(v) > 63 {
- errors = append(errors, fmt.Errorf("%q length should be less than or equal %d characters in length", k, 63))
+ errors = append(errors, fmt.Errorf("length should be less than %d", 63))
return
}
-
if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
- errors = append(errors, fmt.Errorf("the %q is invalid, the %q must begin with an alphanumeric character, end with an alphanumeric character and may only contain alphanumeric characters or hyphens, got %q", k, k, v))
+ errors = append(errors, fmt.Errorf("expected value of %s does not match regular expression, got %v", k, v))
return
}
return
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights.go b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights.go
index 2d7e24ace0b6..9b2a002e7de6 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_storage_insights.go
@@ -1,9 +1,9 @@
package validate
func LogAnalyticsStorageInsightsName(i interface{}, k string) (warnings []string, errors []error) {
- return LogAnalyticsGenericName(i, k)
+ return logAnalyticsGenericName(i, k)
}
func LogAnalyticsStorageInsightsWorkspaceName(i interface{}, k string) (warnings []string, errors []error) {
- return LogAnalyticsGenericName(i, k)
+ return logAnalyticsGenericName(i, k)
}
diff --git a/website/docs/r/log_analytics_storage_insights.html.markdown b/website/docs/r/log_analytics_storage_insights.html.markdown
index ec6b4c371f9d..eb20ae55f631 100644
--- a/website/docs/r/log_analytics_storage_insights.html.markdown
+++ b/website/docs/r/log_analytics_storage_insights.html.markdown
@@ -52,9 +52,9 @@ The following arguments are supported:
* `resource_group_name` - (Required) The name of the Resource Group where the Log Analytics Storage Insights should exist. Changing this forces a new Log Analytics Storage Insights to be created.
-* `workspace_id` - (Required) The resource ID of the workspace to create the Log Analytics Storage Insights within. Changing this forces a new Log Analytics Storage Insights to be created.
+* `workspace_id` - (Required) The ID of the Log Analytics Workspace within which the Storage Insights should exist. Changing this forces a new Log Analytics Storage Insights to be created.
-* `storage_account_id` - (Required) The resource ID of the storage account to be used by this Log Analytics Storage Insights.
+* `storage_account_id` - (Required) The ID of the Storage Account used by this Log Analytics Storage Insights.
* `storage_account_key` - (Required) The storage access key to be used to connect to the storage account.
@@ -84,5 +84,5 @@ The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/d
Log Analytics Storage Insight Configs can be imported using the `resource id`, e.g.
```shell
-terraform import azurerm_log_analytics_storage_insights.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1
+terraform import azurerm_log_analytics_storage_insights.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/storageInsightConfigs/storageInsight1
```
From 777938f0d470b54327ebca37e3f56ed1d2d78536 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Thu, 5 Nov 2020 12:24:20 +0000
Subject: [PATCH 16/46] case sensitivity back into id paths
---
.../loganalytics/parse/log_analytics_storage_insights.go | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
index 38149a434e01..b860569555e3 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_storage_insights.go
@@ -38,15 +38,16 @@ func LogAnalyticsStorageInsightsID(input string) (*LogAnalyticsStorageInsightsId
logAnalyticsStorageInsight := LogAnalyticsStorageInsightsId{
ResourceGroup: id.ResourceGroup,
}
+
if logAnalyticsStorageInsight.WorkspaceName, err = id.PopSegment("workspaces"); err != nil {
return nil, err
}
+
logAnalyticsStorageInsight.WorkspaceID = NewLogAnalyticsWorkspaceID(logAnalyticsStorageInsight.WorkspaceName, id.ResourceGroup).ID(id.SubscriptionID)
if logAnalyticsStorageInsight.Name, err = id.PopSegment("storageInsightConfigs"); err != nil {
- if logAnalyticsStorageInsight.Name, err = id.PopSegment("storageinsightconfigs"); err != nil {
- return nil, err
- }
+ return nil, err
}
+
if err := id.ValidateNoEmptySegments(input); err != nil {
return nil, err
}
From 9121432add2102f3dcff960a80888115d8e22520 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Thu, 5 Nov 2020 13:02:53 +0000
Subject: [PATCH 17/46] further review updates
---
.../log_analytics_storage_insights_import.go | 18 ++++++++++++++++++
.../log_analytics_storage_insights_resource.go | 14 +++-----------
2 files changed, 21 insertions(+), 11 deletions(-)
create mode 100644 azurerm/internal/services/loganalytics/log_analytics_storage_insights_import.go
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_import.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_import.go
new file mode 100644
index 000000000000..006e135158f3
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_import.go
@@ -0,0 +1,18 @@
+package loganalytics
+
+import (
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
+)
+
+func logAnalyticsStorageInsightsImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
+ if _, err := parse.LogAnalyticsStorageInsightsID(d.Id()); err != nil {
+ return []*schema.ResourceData{d}, err
+ }
+
+ if v, ok := d.GetOk("storage_account_key"); ok && v.(string) != "" {
+ d.Set("storage_account_key", v)
+ }
+
+ return []*schema.ResourceData{d}, nil
+}
diff --git a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
index f112da19ee7d..a715ec81f07b 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_storage_insights_resource.go
@@ -15,7 +15,6 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
storageValidate "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/storage/validate"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags"
- azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)
@@ -34,10 +33,9 @@ func resourceArmLogAnalyticsStorageInsights() *schema.Resource {
Delete: schema.DefaultTimeout(30 * time.Minute),
},
- Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error {
- _, err := parse.LogAnalyticsStorageInsightsID(id)
- return err
- }),
+ Importer: &schema.ResourceImporter{
+ State: logAnalyticsStorageInsightsImporter,
+ },
Schema: map[string]*schema.Schema{
"name": {
@@ -47,8 +45,6 @@ func resourceArmLogAnalyticsStorageInsights() *schema.Resource {
ValidateFunc: validate.LogAnalyticsStorageInsightsName,
},
- // must ignore case since API lowercases all returned data
- // IssueP https://github.com/Azure/azure-sdk-for-go/issues/13268
"resource_group_name": azure.SchemaResourceGroupName(),
"workspace_id": {
@@ -156,9 +152,6 @@ func resourceArmLogAnalyticsStorageInsightsRead(d *schema.ResourceData, meta int
return err
}
- // Need to pull this from the config since the API does not return this value
- storageAccountKey := d.Get("storage_account_key").(string)
-
resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name)
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
@@ -180,7 +173,6 @@ func resourceArmLogAnalyticsStorageInsightsRead(d *schema.ResourceData, meta int
storageAccountId = *props.StorageAccount.ID
}
d.Set("storage_account_id", storageAccountId)
- d.Set("storage_account_key", storageAccountKey)
d.Set("table_names", utils.FlattenStringSlice(props.Tables))
}
From 631b019aef7ef87acc79a0062e335ae2db9a01bd Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Thu, 5 Nov 2020 15:10:27 -0800
Subject: [PATCH 18/46] ToLower'ed WorkspaceId
---
.../parse/log_analytics_workspace.go | 4 ++-
...nalytics_storage_insights_resource_test.go | 34 +++++++++++++++++++
2 files changed, 37 insertions(+), 1 deletion(-)
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_workspace.go b/azurerm/internal/services/loganalytics/parse/log_analytics_workspace.go
index a9c9b59ac17d..54e6d0032e95 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_workspace.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_workspace.go
@@ -2,6 +2,7 @@ package parse
import (
"fmt"
+ "strings"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
)
@@ -20,7 +21,8 @@ func NewLogAnalyticsWorkspaceID(name, resourceGroup string) LogAnalyticsWorkspac
func (id LogAnalyticsWorkspaceId) ID(subscriptionId string) string {
// Log Analytics ID ignores casing
- return fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/microsoft.operationalinsights/workspaces/%s", subscriptionId, id.ResourceGroup, id.Name)
+ // Issue tracked here - https://github.com/Azure/azure-sdk-for-go/issues/13268
+ return fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/microsoft.operationalinsights/workspaces/%s", subscriptionId, strings.ToLower(id.ResourceGroup), strings.ToLower(id.Name))
}
func LogAnalyticsWorkspaceID(input string) (*LogAnalyticsWorkspaceId, error) {
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go
index 1aafdcd6195e..8a1a35faae9e 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_storage_insights_resource_test.go
@@ -30,6 +30,24 @@ func TestAccAzureRMLogAnalyticsStorageInsights_basic(t *testing.T) {
})
}
+func TestAccAzureRMLogAnalyticsStorageInsights_basicUppercase(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMLogAnalyticsStorageInsightsDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMLogAnalyticsStorageInsights_basicUppercase(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsStorageInsightsExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep("storage_account_key"), // key is not returned by the API
+ },
+ })
+}
+
func TestAccAzureRMLogAnalyticsStorageInsights_requiresImport(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_log_analytics_storage_insights", "test")
resource.ParallelTest(t, resource.TestCase{
@@ -212,6 +230,22 @@ resource "azurerm_log_analytics_storage_insights" "test" {
`, template, data.RandomInteger)
}
+func testAccAzureRMLogAnalyticsStorageInsights_basicUppercase(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsStorageInsights_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_storage_insights" "test" {
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ workspace_id = azurerm_log_analytics_workspace.test.id
+
+ storage_account_id = azurerm_storage_account.test.id
+ storage_account_key = azurerm_storage_account.test.primary_access_key
+}
+`, template, data.RandomInteger)
+}
+
func testAccAzureRMLogAnalyticsStorageInsights_requiresImport(data acceptance.TestData) string {
config := testAccAzureRMLogAnalyticsStorageInsights_basic(data)
return fmt.Sprintf(`
From aef745ae0884b4eccf13d875c14daf25b97a5381 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 11 Nov 2020 14:26:25 -0800
Subject: [PATCH 19/46] Update schema
---
.../log_analytics_clusters_resource.go | 154 +++++----
.../suppress/log_analytics_cluster.go | 31 ++
.../log_analytics_cluster_resource_test.go | 321 ++++++++++++++++++
.../validate/log_analytics_cluster.go | 24 +-
.../validate/log_analytics_cluster_test.go | 29 +-
website/azurerm.erb | 4 +
.../r/log_analytics_cluster.html.markdown | 29 +-
7 files changed, 473 insertions(+), 119 deletions(-)
create mode 100644 azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go
create mode 100644 azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
diff --git a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
index fead449c7289..4742e739a4e3 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
@@ -6,6 +6,7 @@ import (
"time"
"github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
@@ -13,6 +14,7 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/suppress"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags"
azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
@@ -28,9 +30,9 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
Delete: resourceArmLogAnalyticsClusterDelete,
Timeouts: &schema.ResourceTimeout{
- Create: schema.DefaultTimeout(30 * time.Minute),
+ Create: schema.DefaultTimeout(6 * time.Hour),
Read: schema.DefaultTimeout(5 * time.Minute),
- Update: schema.DefaultTimeout(30 * time.Minute),
+ Update: schema.DefaultTimeout(6 * time.Hour),
Delete: schema.DefaultTimeout(30 * time.Minute),
},
@@ -53,7 +55,7 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
"identity": {
Type: schema.TypeList,
- Optional: true,
+ Required: true,
ForceNew: true,
MaxItems: 1,
Elem: &schema.Resource{
@@ -64,7 +66,6 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
ForceNew: true,
ValidateFunc: validation.StringInSlice([]string{
string(operationalinsights.SystemAssigned),
- string(operationalinsights.None),
}, false),
},
@@ -81,12 +82,6 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
},
},
- "next_link": {
- Type: schema.TypeString,
- Optional: true,
- ForceNew: true,
- },
-
"key_vault_property": {
Type: schema.TypeList,
Optional: true,
@@ -99,8 +94,9 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
},
"key_vault_uri": {
- Type: schema.TypeString,
- Optional: true,
+ Type: schema.TypeString,
+ Optional: true,
+ DiffSuppressFunc: suppress.LogAnalyticsClusterUrl,
},
"key_version": {
@@ -111,26 +107,17 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
},
},
- "sku": {
- Type: schema.TypeList,
+ // Per the documentation cluster capacity must start at 1000 GB and can go above 3000 GB with an exception by Microsoft
+ // so I am not limiting the upper bound here by design
+ // https://docs.microsoft.com/en-us/azure/azure-monitor/platform/manage-cost-storage#log-analytics-dedicated-clusters
+ "size_gb": {
+ Type: schema.TypeInt,
Optional: true,
- MaxItems: 1,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "name": {
- Type: schema.TypeString,
- Optional: true,
- ValidateFunc: validation.StringInSlice([]string{
- string(operationalinsights.CapacityReservation),
- }, false),
- },
-
- "capacity": {
- Type: schema.TypeInt,
- Optional: true,
- },
- },
- },
+ Default: 1000,
+ ValidateFunc: validation.All(
+ validation.IntAtLeast(1000),
+ validation.IntDivisibleBy(100),
+ ),
},
"cluster_id": {
@@ -138,11 +125,6 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
Computed: true,
},
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
-
"tags": tags.Schema(),
},
}
@@ -162,19 +144,27 @@ func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface
}
}
if existing.ID != nil && *existing.ID != "" {
- return tf.ImportAsExistsError("azurerm_operationalinsights_cluster", *existing.ID)
+ return tf.ImportAsExistsError("azurerm_log_analytics_cluster", *existing.ID)
+ }
+
+ keyVaultProps := expandArmLogAnalyticsClusterKeyVaultProperties(d.Get("key_vault_property").([]interface{}))
+
+ if d.IsNewResource() && keyVaultProps != nil {
+ return fmt.Errorf("the Log Analytics Cluster %q (Resource Group %q) must be successfully provisioned before it can be configured to support customer managed keys", name, resourceGroup)
+ }
+
+ sku := &operationalinsights.ClusterSku{
+ Capacity: utils.Int64(int64(d.Get("size_gb").(int))),
+ Name: operationalinsights.CapacityReservation,
}
parameters := operationalinsights.Cluster{
Location: utils.String(location.Normalize(d.Get("location").(string))),
Identity: expandArmLogAnalyticsClusterIdentity(d.Get("identity").([]interface{})),
- ClusterProperties: &operationalinsights.ClusterProperties{
- NextLink: utils.String(d.Get("next_link").(string)),
- KeyVaultProperties: expandArmLogAnalyticsClusterKeyVaultProperties(d.Get("key_vault_property").([]interface{})),
- },
- Sku: expandArmLogAnalyticsClusterClusterSku(d.Get("sku").([]interface{})),
- Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
+ Sku: sku,
+ Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
}
+
future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters)
if err != nil {
return fmt.Errorf("creating Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
@@ -226,13 +216,10 @@ func resourceArmLogAnalyticsClusterRead(d *schema.ResourceData, meta interface{}
if err := d.Set("key_vault_property", flattenArmLogAnalyticsKeyVaultProperties(props.KeyVaultProperties)); err != nil {
return fmt.Errorf("setting `key_vault_property`: %+v", err)
}
- d.Set("next_link", props.NextLink)
d.Set("cluster_id", props.ClusterID)
}
- if err := d.Set("sku", flattenArmLogAnalyticsClusterSku(resp.Sku)); err != nil {
- return fmt.Errorf("setting `sku`: %+v", err)
- }
- d.Set("type", resp.Type)
+ d.Set("size_gb", flattenArmLogAnalyticsClusterSku(resp.Sku))
+
return tags.FlattenAndSet(d, resp.Tags)
}
@@ -249,12 +236,18 @@ func resourceArmLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface
parameters := operationalinsights.ClusterPatch{
ClusterPatchProperties: &operationalinsights.ClusterPatchProperties{},
}
+
if d.HasChange("key_vault_property") {
parameters.ClusterPatchProperties.KeyVaultProperties = expandArmLogAnalyticsClusterKeyVaultProperties(d.Get("key_vault_property").([]interface{}))
}
- if d.HasChange("sku") {
- parameters.Sku = expandArmLogAnalyticsClusterClusterSku(d.Get("sku").([]interface{}))
+
+ if d.HasChange("size_gb") {
+ parameters.Sku = &operationalinsights.ClusterSku{
+ Capacity: utils.Int64(int64(d.Get("size_gb").(int))),
+ Name: operationalinsights.CapacityReservation,
+ }
}
+
if d.HasChange("tags") {
parameters.Tags = tags.Expand(d.Get("tags").(map[string]interface{}))
}
@@ -262,6 +255,36 @@ func resourceArmLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface
if _, err := client.Update(ctx, id.ResourceGroup, id.Name, parameters); err != nil {
return fmt.Errorf("updating Log Analytics Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
}
+
+ // Need to wait for the cluster to actually finish updating the resource before continuing
+ // since the service returns a 200 instantly while it's still updating in the background
+ log.Printf("[INFO] Checking for Log Analytics Cluster %q provisioning state", id.Name)
+
+ updateWait := &resource.StateChangeConf{
+ Pending: []string{string(operationalinsights.Updating)},
+ Target: []string{string(operationalinsights.Succeeded)},
+ MinTimeout: 1 * time.Minute,
+ Timeout: d.Timeout(schema.TimeoutUpdate),
+ Refresh: func() (interface{}, string, error) {
+ log.Printf("[INFO] checking on state of Log Analytics Cluster %q", id.Name)
+
+ resp, err := client.Get(ctx, id.ResourceGroup, id.Name)
+ if err != nil {
+ return nil, "nil", fmt.Errorf("polling for the status of Log Analytics Cluster %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err)
+ }
+
+ if resp.ClusterProperties.ProvisioningState != operationalinsights.Updating && resp.ClusterProperties.ProvisioningState != operationalinsights.Succeeded {
+ return nil, "nil", fmt.Errorf("Log Analytics Cluster %q (Resource Group %q) unexpected Provisioning State encountered: %q", id.Name, id.ResourceGroup, string(resp.ClusterProperties.ProvisioningState))
+ }
+
+ return resp, string(resp.ClusterProperties.ProvisioningState), nil
+ },
+ }
+
+ if _, err := updateWait.WaitForState(); err != nil {
+ return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err)
+ }
+
return resourceArmLogAnalyticsClusterRead(d, meta)
}
@@ -308,17 +331,6 @@ func expandArmLogAnalyticsClusterKeyVaultProperties(input []interface{}) *operat
}
}
-func expandArmLogAnalyticsClusterClusterSku(input []interface{}) *operationalinsights.ClusterSku {
- if len(input) == 0 {
- return nil
- }
- v := input[0].(map[string]interface{})
- return &operationalinsights.ClusterSku{
- Capacity: utils.Int64(int64(v["capacity"].(int))),
- Name: operationalinsights.ClusterSkuNameEnum(v["name"].(string)),
- }
-}
-
func flattenArmLogAnalyticsIdentity(input *operationalinsights.Identity) []interface{} {
if input == nil {
return make([]interface{}, 0)
@@ -371,23 +383,15 @@ func flattenArmLogAnalyticsKeyVaultProperties(input *operationalinsights.KeyVaul
}
}
-func flattenArmLogAnalyticsClusterSku(input *operationalinsights.ClusterSku) []interface{} {
+func flattenArmLogAnalyticsClusterSku(input *operationalinsights.ClusterSku) interface{} {
if input == nil {
- return make([]interface{}, 0)
+ return nil
}
- var name operationalinsights.ClusterSkuNameEnum
- if input.Name != "" {
- name = input.Name
- }
- var capacity int64
+ var capacity int
if input.Capacity != nil {
- capacity = *input.Capacity
- }
- return []interface{}{
- map[string]interface{}{
- "name": name,
- "capacity": capacity,
- },
+ capacity = int(*input.Capacity)
}
+
+ return []interface{}{capacity}
}
diff --git a/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go
new file mode 100644
index 000000000000..af27f0f3eb54
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go
@@ -0,0 +1,31 @@
+package suppress
+
+import (
+ "fmt"
+ "log"
+ "net"
+ "net/url"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+)
+
+func LogAnalyticsClusterUrl(_, old, new string, _ *schema.ResourceData) bool {
+ // verify the uri is valid
+ log.Printf("[INFO] Suppress Log Analytics Cluster URI: %s", old)
+ u, err := url.ParseRequestURI(old)
+ if err != nil || u.Host == "" {
+ return false
+ }
+
+ host, _, err := net.SplitHostPort(u.Host)
+ if err != nil {
+ host = u.Host
+ }
+
+ log.Printf("[INFO] Suppress Log Analytics Cluster URI: %s == %s", new, fmt.Sprintf("%s://%s/", u.Scheme, host))
+ if new == fmt.Sprintf("%s://%s/", u.Scheme, host) {
+ return true
+ }
+
+ return false
+}
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
new file mode 100644
index 000000000000..ff135a98ce6d
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -0,0 +1,321 @@
+package tests
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+)
+
+func TestAccAzureRMoperationalinsightsCluster_basic(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func TestAccAzureRMoperationalinsightsCluster_requiresImport(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.RequiresImportErrorStep(testAccAzureRMoperationalinsightsCluster_requiresImport),
+ },
+ })
+}
+
+func TestAccAzureRMoperationalinsightsCluster_complete(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func TestAccAzureRMoperationalinsightsCluster_update(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_basic(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func TestAccAzureRMoperationalinsightsCluster_updateKeyVaultProperties(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_updateKeyVaultProperties(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func TestAccAzureRMoperationalinsightsCluster_updateSku(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ {
+ Config: testAccAzureRMoperationalinsightsCluster_updateSku(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func testCheckAzureRMoperationalinsightsClusterExists(resourceName string) resource.TestCheckFunc {
+ return func(s *terraform.State) error {
+ client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.ClusterClient
+ ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+ rs, ok := s.RootModule().Resources[resourceName]
+ if !ok {
+ return fmt.Errorf("operationalinsights Cluster not found: %s", resourceName)
+ }
+ id, err := parse.OperationalinsightsClusterID(rs.Primary.ID)
+ if err != nil {
+ return err
+ }
+ if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil {
+ if !utils.ResponseWasNotFound(resp.Response) {
+ return fmt.Errorf("bad: log_analytics Cluster %q does not exist", id.Name)
+ }
+ return fmt.Errorf("bad: Get on LogAnalytics.ClusterClient: %+v", err)
+ }
+ return nil
+ }
+}
+
+func testCheckAzureRMoperationalinsightsClusterDestroy(s *terraform.State) error {
+ client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.ClusterClient
+ ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+
+ for _, rs := range s.RootModule().Resources {
+ if rs.Type != "azurerm_log_analytics_cluster" {
+ continue
+ }
+ id, err := parse.OperationalinsightsClusterID(rs.Primary.ID)
+ if err != nil {
+ return err
+ }
+ if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil {
+ if !utils.ResponseWasNotFound(resp.Response) {
+ return fmt.Errorf("bad: Get on LogAnalytics.ClusterClient: %+v", err)
+ }
+ }
+ return nil
+ }
+ return nil
+}
+
+func testAccAzureRMoperationalinsightsCluster_template(data acceptance.TestData) string {
+ return fmt.Sprintf(`
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "test" {
+ name = "acctestRG-la-%d"
+ location = "%s"
+}
+`, data.RandomInteger, data.Locations.Primary)
+}
+
+func testAccAzureRMoperationalinsightsCluster_basic(data acceptance.TestData) string {
+ template := testAccAzureRMoperationalinsightsCluster_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_cluster" "test" {
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMoperationalinsightsCluster_requiresImport(data acceptance.TestData) string {
+ config := testAccAzureRMoperationalinsightsCluster_basic(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_cluster" "import" {
+ name = azurerm_log_analytics_cluster.test.name
+ resource_group_name = azurerm_log_analytics_cluster.test.resource_group_name
+ location = azurerm_log_analytics_cluster.test.location
+}
+`, config)
+}
+
+func testAccAzureRMoperationalinsightsCluster_complete(data acceptance.TestData) string {
+ template := testAccAzureRMoperationalinsightsCluster_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_cluster" "test" {
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+ identity {
+ type = ""
+ }
+
+ next_link = ""
+
+ key_vault_property {
+ key_name = ""
+ key_vault_uri = ""
+ key_version = ""
+ }
+
+ size_gb = 1100
+
+ tags = {
+ ENV = "Test"
+ }
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMoperationalinsightsCluster_updateKeyVaultProperties(data acceptance.TestData) string {
+ template := testAccAzureRMoperationalinsightsCluster_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_cluster" "test" {
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+ identity {
+ type = ""
+ }
+
+ next_link = ""
+
+ key_vault_property {
+ key_name = ""
+ key_vault_uri = ""
+ key_version = ""
+ }
+
+ size_gb = 1000
+
+ tags = {
+ ENV = "Test"
+ }
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMoperationalinsightsCluster_updateSku(data acceptance.TestData) string {
+ template := testAccAzureRMoperationalinsightsCluster_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_cluster" "test" {
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+ identity {
+ type = ""
+ }
+
+ next_link = ""
+
+ key_vault_property {
+ key_name = ""
+ key_vault_uri = ""
+ key_version = ""
+ }
+
+ size_gb = 1000
+
+ tags = {
+ ENV = "Test"
+ }
+}
+`, template, data.RandomInteger)
+}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go
index 3e88c5236868..2ef7ffbb4a00 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go
@@ -1,27 +1,5 @@
package validate
-import (
- "fmt"
- "regexp"
-)
-
func LogAnalyticsClustersName(i interface{}, k string) (warnings []string, errors []error) {
- v, ok := i.(string)
- if !ok {
- errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
- return
- }
- if len(v) < 4 {
- errors = append(errors, fmt.Errorf("length should be greater than %d", 4))
- return
- }
- if len(v) > 63 {
- errors = append(errors, fmt.Errorf("length should be less than %d", 63))
- return
- }
- if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
- errors = append(errors, fmt.Errorf("expected value of %s not match regular expression, got %v", k, v))
- return
- }
- return
+ return logAnalyticsGenericName(i, k)
}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_test.go b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_test.go
index 4b01554f917e..c7d531596c65 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_test.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster_test.go
@@ -6,47 +6,64 @@ import (
func TestLogAnalyticsClustersName(t *testing.T) {
testCases := []struct {
+ Name string
Input string
Expected bool
}{
{
+ Name: "Too short",
Input: "inv",
Expected: false,
},
{
- Input: "invalid_Cluster_Name",
+ Name: "Invalid characters underscores",
+ Input: "invalid_Clusters_Name",
Expected: false,
},
{
- Input: "invalid Cluster Name",
+ Name: "Invalid characters space",
+ Input: "invalid Clusters Name",
Expected: false,
},
{
- Input: "-invalidClusterName",
+ Name: "Invalid name starts with hyphen",
+ Input: "-invalidClustersName",
Expected: false,
},
{
- Input: "invalidClusterName-",
+ Name: "Invalid name ends with hyphen",
+ Input: "invalidClustersName-",
Expected: false,
},
{
- Input: "validClusterName",
+ Name: "Invalid name too long",
+ Input: "thisIsToLoooooooooooooooooooooooooooooooooooooongForAClusterName",
+ Expected: false,
+ },
+ {
+ Name: "Valid name",
+ Input: "validClustersName",
Expected: true,
},
{
- Input: "validClusterName-2",
+ Name: "Valid name with hyphen",
+ Input: "validClustersName-2",
Expected: true,
},
{
+ Name: "Valid name max length",
Input: "thisIsTheLooooooooooooooooooooooooongestValidClusterNameThereIs",
Expected: true,
},
{
+ Name: "Valid name min length",
Input: "vali",
Expected: true,
},
}
for _, v := range testCases {
+ t.Logf("[DEBUG] Testing %q..", v.Name)
+
_, errors := LogAnalyticsClustersName(v.Input, "name")
result := len(errors) == 0
if result != v.Expected {
diff --git a/website/azurerm.erb b/website/azurerm.erb
index f67f28b0e576..631ea4404566 100644
--- a/website/azurerm.erb
+++ b/website/azurerm.erb
@@ -1999,6 +1999,10 @@
Log Analytics Resources
+ -
+ azurerm_log_analytics_cluster
+
+
-
azurerm_log_analytics_data_export_rule
diff --git a/website/docs/r/log_analytics_cluster.html.markdown b/website/docs/r/log_analytics_cluster.html.markdown
index 911e03d0c1ba..2759df2d3317 100644
--- a/website/docs/r/log_analytics_cluster.html.markdown
+++ b/website/docs/r/log_analytics_cluster.html.markdown
@@ -8,6 +8,9 @@ description: |-
# azurerm_log_analytics_cluster
+
+~> **Important** Due to capacity constraints, Microsoft requires you to pre-register your subscription IDs before you are allowed to create a Log Analytics cluster. Contact Microsoft, or open a support request to register your subscription IDs.
+
Manages a Log Analytics Cluster.
## Example Usage
@@ -26,6 +29,10 @@ resource "azurerm_log_analytics_cluster" "example" {
name = "example-cluster"
resource_group_name = azurerm_resource_group.example.name
location = azurerm_resource_group.example.location
+
+ identity {
+ type = "SystemAssigned"
+ }
}
```
@@ -39,15 +46,13 @@ The following arguments are supported:
* `location` - (Required) The Azure Region where the Log Analytics Cluster should exist. Changing this forces a new Log Analytics Cluster to be created.
----
-
-* `identity` - (Optional) A `identity` block as defined below. Changing this forces a new Log Analytics Cluster to be created.
-
-* `next_link` - (Optional) The link used to get the next page of recommendations. Changing this forces a new Log Analytics Cluster to be created.
+* `identity` - (Required) An `identity` block as defined below. Changing this forces a new Log Analytics Cluster to be created.
* `key_vault_property` - (Optional) A `key_vault_property` block as defined below.
-* `sku` - (Optional) A `sku` block as defined below.
+* `size_gb` - (Optional) The capacity of the Log Analytics Cluster specified in GB/day. Defaults to 1000.
+
+~> **NOTE:** The `size_gb` can be in the range of 1000 to 3000 GB per day and must be in steps of 100 GB. For `size_gb` levels higher than 3000 GB per day, please contact your Microsoft contact to enable it.
* `tags` - (Optional) A mapping of tags which should be assigned to the Log Analytics Cluster.
@@ -69,13 +74,7 @@ An `key_vault_property` block exports the following:
* `key_version` - (Optional) The version of the key associated with the Log Analytics cluster.
----
-
-An `sku` block exports the following:
-
-* `name` - (Optional) The name which should be used for this sku. Possible value is "CapacityReservation" is allowed.
-
-* `capacity` - (Optional) The capacity value.
+~> **NOTE:** You must first successfully provision a Log Analytics cluster before you can configure the Log Analytics cluster for Customer-Managed Keys by defining a `key_vault_property` block. Customer-Managed Key capability is regional. Your Azure Key Vault, cluster and linked Log Analytics workspaces must be in the same region, but they can be in different subscriptions.
## Attributes Reference
@@ -103,9 +102,9 @@ An `identity` block exports the following:
The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
-* `create` - (Defaults to 30 minutes) Used when creating the Log Analytics Cluster.
+* `create` - (Defaults to 6 hours) Used when creating the Log Analytics Cluster.
* `read` - (Defaults to 5 minutes) Used when retrieving the Log Analytics Cluster.
-* `update` - (Defaults to 30 minutes) Used when updating the Log Analytics Cluster.
+* `update` - (Defaults to 6 hours) Used when updating the Log Analytics Cluster.
* `delete` - (Defaults to 30 minutes) Used when deleting the Log Analytics Cluster.
## Import
From b20be14bd0ad477e544f83ca6f95fda08e24252b Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 11 Nov 2020 16:10:25 -0800
Subject: [PATCH 20/46] Finish Test Cases
---
.../log_analytics_cluster_resource_test.go | 245 +++++++-----------
1 file changed, 92 insertions(+), 153 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
index ff135a98ce6d..daf377de9e8c 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -11,7 +11,7 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)
-func TestAccAzureRMoperationalinsightsCluster_basic(t *testing.T) {
+func TestAccAzureRMLogAnalyticsCluster_basic(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
@@ -19,7 +19,7 @@ func TestAccAzureRMoperationalinsightsCluster_basic(t *testing.T) {
CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMoperationalinsightsCluster_basic(data),
+ Config: testAccAzureRMLogAnalyticsCluster_basic(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
),
@@ -29,7 +29,7 @@ func TestAccAzureRMoperationalinsightsCluster_basic(t *testing.T) {
})
}
-func TestAccAzureRMoperationalinsightsCluster_requiresImport(t *testing.T) {
+func TestAccAzureRMLogAnalyticsCluster_requiresImport(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
@@ -37,17 +37,17 @@ func TestAccAzureRMoperationalinsightsCluster_requiresImport(t *testing.T) {
CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMoperationalinsightsCluster_basic(data),
+ Config: testAccAzureRMLogAnalyticsCluster_basic(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
),
},
- data.RequiresImportErrorStep(testAccAzureRMoperationalinsightsCluster_requiresImport),
+ data.RequiresImportErrorStep(testAccAzureRMLogAnalyticsCluster_requiresImport),
},
})
}
-func TestAccAzureRMoperationalinsightsCluster_complete(t *testing.T) {
+func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
@@ -55,89 +55,14 @@ func TestAccAzureRMoperationalinsightsCluster_complete(t *testing.T) {
CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMoperationalinsightsCluster_complete(data),
- Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
- ),
- },
- data.ImportStep(),
- },
- })
-}
-
-func TestAccAzureRMoperationalinsightsCluster_update(t *testing.T) {
- data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() { acceptance.PreCheck(t) },
- Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccAzureRMoperationalinsightsCluster_basic(data),
- Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
- ),
- },
- data.ImportStep(),
- {
- Config: testAccAzureRMoperationalinsightsCluster_complete(data),
+ Config: testAccAzureRMLogAnalyticsCluster_basic(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
),
},
data.ImportStep(),
{
- Config: testAccAzureRMoperationalinsightsCluster_basic(data),
- Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
- ),
- },
- data.ImportStep(),
- },
- })
-}
-
-func TestAccAzureRMoperationalinsightsCluster_updateKeyVaultProperties(t *testing.T) {
- data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() { acceptance.PreCheck(t) },
- Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccAzureRMoperationalinsightsCluster_complete(data),
- Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
- ),
- },
- data.ImportStep(),
- {
- Config: testAccAzureRMoperationalinsightsCluster_updateKeyVaultProperties(data),
- Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
- ),
- },
- data.ImportStep(),
- },
- })
-}
-
-func TestAccAzureRMoperationalinsightsCluster_updateSku(t *testing.T) {
- data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() { acceptance.PreCheck(t) },
- Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccAzureRMoperationalinsightsCluster_complete(data),
- Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
- ),
- },
- data.ImportStep(),
- {
- Config: testAccAzureRMoperationalinsightsCluster_updateSku(data),
+ Config: testAccAzureRMLogAnalyticsCluster_complete(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
),
@@ -191,7 +116,7 @@ func testCheckAzureRMoperationalinsightsClusterDestroy(s *terraform.State) error
return nil
}
-func testAccAzureRMoperationalinsightsCluster_template(data acceptance.TestData) string {
+func testAccAzureRMLogAnalyticsCluster_template(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
@@ -204,118 +129,132 @@ resource "azurerm_resource_group" "test" {
`, data.RandomInteger, data.Locations.Primary)
}
-func testAccAzureRMoperationalinsightsCluster_basic(data acceptance.TestData) string {
- template := testAccAzureRMoperationalinsightsCluster_template(data)
+func testAccAzureRMLogAnalyticsCluster_basic(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsCluster_template(data)
return fmt.Sprintf(`
%s
resource "azurerm_log_analytics_cluster" "test" {
- name = "acctest-LA-%d"
+ name = "acctest-LA-%d"
resource_group_name = azurerm_resource_group.test.name
- location = azurerm_resource_group.test.location
+ location = azurerm_resource_group.test.location
+
+ identity {
+ type = "SystemAssigned"
+ }
}
`, template, data.RandomInteger)
}
-func testAccAzureRMoperationalinsightsCluster_requiresImport(data acceptance.TestData) string {
- config := testAccAzureRMoperationalinsightsCluster_basic(data)
+func testAccAzureRMLogAnalyticsCluster_requiresImport(data acceptance.TestData) string {
+ config := testAccAzureRMLogAnalyticsCluster_basic(data)
return fmt.Sprintf(`
%s
resource "azurerm_log_analytics_cluster" "import" {
- name = azurerm_log_analytics_cluster.test.name
+ name = azurerm_log_analytics_cluster.test.name
resource_group_name = azurerm_log_analytics_cluster.test.resource_group_name
- location = azurerm_log_analytics_cluster.test.location
+ location = azurerm_log_analytics_cluster.test.location
+
+ identity {
+ type = "SystemAssigned"
+ }
}
`, config)
}
-func testAccAzureRMoperationalinsightsCluster_complete(data acceptance.TestData) string {
- template := testAccAzureRMoperationalinsightsCluster_template(data)
+func testAccAzureRMLogAnalyticsCluster_complete(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsCluster_template(data)
return fmt.Sprintf(`
%s
-resource "azurerm_log_analytics_cluster" "test" {
- name = "acctest-LA-%d"
- resource_group_name = azurerm_resource_group.test.name
- location = azurerm_resource_group.test.location
- identity {
- type = ""
- }
+data "azurerm_client_config" "current" {}
- next_link = ""
-
- key_vault_property {
- key_name = ""
- key_vault_uri = ""
- key_version = ""
- }
+resource "azurerm_key_vault" "test" {
+ name = "vault%d"
+ location = azurerm_resource_group.test.location
+ resource_group_name = azurerm_resource_group.test.name
+ tenant_id = data.azurerm_client_config.current.tenant_id
- size_gb = 1100
+ sku_name = "premium"
- tags = {
- ENV = "Test"
- }
-}
-`, template, data.RandomInteger)
+ soft_delete_enabled = true
+ soft_delete_retention_days = 7
+ purge_protection_enabled = true
}
-func testAccAzureRMoperationalinsightsCluster_updateKeyVaultProperties(data acceptance.TestData) string {
- template := testAccAzureRMoperationalinsightsCluster_template(data)
- return fmt.Sprintf(`
-%s
+resource "azurerm_key_vault_key" "test" {
+ name = "key-%s""
+ key_vault_id = azurerm_key_vault.test.id
+ key_type = "RSA"
+ key_size = 2048
+
+ key_opts = [
+ "decrypt",
+ "encrypt",
+ "sign",
+ "unwrapKey",
+ "verify",
+ "wrapKey",
+ ]
+
+ depends_on = [azurerm_key_vault_access_policy.subscription]
+}
-resource "azurerm_log_analytics_cluster" "test" {
- name = "acctest-LA-%d"
- resource_group_name = azurerm_resource_group.test.name
- location = azurerm_resource_group.test.location
- identity {
- type = ""
- }
+resource "azurerm_key_vault_access_policy" "subscription" {
+ key_vault_id = azurerm_key_vault.test.id
+
+ key_permissions = [
+ "create",
+ "delete",
+ "get",
+ "update",
+ "list",
+ ]
+
+ secret_permissions = [
+ "get",
+ "delete",
+ "set",
+ ]
+
+ tenant_id = data.azurerm_client_config.current.tenant_id
+ object_id = data.azurerm_client_config.current.object_id
+}
- next_link = ""
+resource "azurerm_key_vault_access_policy" "test" {
+ key_vault_id = azurerm_key_vault.test.id
- key_vault_property {
- key_name = ""
- key_vault_uri = ""
- key_version = ""
- }
+ key_permissions = [
+ "get",
+ "unwrapkey",
+ "wrapkey"
+ ]
- size_gb = 1000
-
- tags = {
- ENV = "Test"
- }
-}
-`, template, data.RandomInteger)
+ tenant_id = azurerm_log_analytics_cluster.example.identity.0.tenant_id
+ object_id = azurerm_log_analytics_cluster.example.identity.0.principal_id
}
-func testAccAzureRMoperationalinsightsCluster_updateSku(data acceptance.TestData) string {
- template := testAccAzureRMoperationalinsightsCluster_template(data)
- return fmt.Sprintf(`
-%s
-
resource "azurerm_log_analytics_cluster" "test" {
- name = "acctest-LA-%d"
+ name = "acctest-LA-%d"
resource_group_name = azurerm_resource_group.test.name
- location = azurerm_resource_group.test.location
+ location = azurerm_resource_group.test.location
+
identity {
- type = ""
+ type = "SystemAssigned"
}
- next_link = ""
-
key_vault_property {
- key_name = ""
- key_vault_uri = ""
- key_version = ""
+ key_name = azurerm_key_vault_key.test.name
+ key_vault_uri = azurerm_key_vault.test.vault_uri
+ key_version = azurerm_key_vault_key.test.version
}
- size_gb = 1000
+ size_gb = 1100
tags = {
ENV = "Test"
}
}
-`, template, data.RandomInteger)
+`, template, data.RandomInteger, data.RandomString, data.RandomInteger)
}
From 7f9154fcc227a48891e94cb97201a87e1e1926e8 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 11 Nov 2020 16:17:22 -0800
Subject: [PATCH 21/46] Update test names
---
.../log_analytics_cluster_resource_test.go | 27 ++++++++++---------
1 file changed, 14 insertions(+), 13 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
index daf377de9e8c..05a101424f4b 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -8,6 +8,7 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/terraform"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)
@@ -16,12 +17,12 @@ func TestAccAzureRMLogAnalyticsCluster_basic(t *testing.T) {
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
+ CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
Steps: []resource.TestStep{
{
Config: testAccAzureRMLogAnalyticsCluster_basic(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
data.ImportStep(),
@@ -34,12 +35,12 @@ func TestAccAzureRMLogAnalyticsCluster_requiresImport(t *testing.T) {
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
+ CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
Steps: []resource.TestStep{
{
Config: testAccAzureRMLogAnalyticsCluster_basic(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
data.RequiresImportErrorStep(testAccAzureRMLogAnalyticsCluster_requiresImport),
@@ -52,19 +53,19 @@ func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMoperationalinsightsClusterDestroy,
+ CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
Steps: []resource.TestStep{
{
Config: testAccAzureRMLogAnalyticsCluster_basic(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
data.ImportStep(),
{
Config: testAccAzureRMLogAnalyticsCluster_complete(data),
Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMoperationalinsightsClusterExists(data.ResourceName),
+ testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
data.ImportStep(),
@@ -72,21 +73,21 @@ func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
})
}
-func testCheckAzureRMoperationalinsightsClusterExists(resourceName string) resource.TestCheckFunc {
+func testCheckAzureRMLogAnalyticsClusterExists(resourceName string) resource.TestCheckFunc {
return func(s *terraform.State) error {
client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.ClusterClient
ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
rs, ok := s.RootModule().Resources[resourceName]
if !ok {
- return fmt.Errorf("operationalinsights Cluster not found: %s", resourceName)
+ return fmt.Errorf("log analytics Cluster not found: %s", resourceName)
}
- id, err := parse.OperationalinsightsClusterID(rs.Primary.ID)
+ id, err := parse.LogAnalyticsClusterID(rs.Primary.ID)
if err != nil {
return err
}
if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil {
if !utils.ResponseWasNotFound(resp.Response) {
- return fmt.Errorf("bad: log_analytics Cluster %q does not exist", id.Name)
+ return fmt.Errorf("bad: log analytics Cluster %q does not exist", id.Name)
}
return fmt.Errorf("bad: Get on LogAnalytics.ClusterClient: %+v", err)
}
@@ -94,7 +95,7 @@ func testCheckAzureRMoperationalinsightsClusterExists(resourceName string) resou
}
}
-func testCheckAzureRMoperationalinsightsClusterDestroy(s *terraform.State) error {
+func testCheckAzureRMLogAnalyticsClusterDestroy(s *terraform.State) error {
client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.ClusterClient
ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
@@ -102,7 +103,7 @@ func testCheckAzureRMoperationalinsightsClusterDestroy(s *terraform.State) error
if rs.Type != "azurerm_log_analytics_cluster" {
continue
}
- id, err := parse.OperationalinsightsClusterID(rs.Primary.ID)
+ id, err := parse.LogAnalyticsClusterID(rs.Primary.ID)
if err != nil {
return err
}
From b90c49f2151025f02cc722265d78a2b1674ee118 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 11 Nov 2020 21:01:29 -0800
Subject: [PATCH 22/46] Update test cases
---
.../services/loganalytics/suppress/log_analytics_cluster.go | 4 ----
.../tests/log_analytics_cluster_resource_test.go | 6 +++---
2 files changed, 3 insertions(+), 7 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go
index af27f0f3eb54..5feda882b175 100644
--- a/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go
+++ b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster.go
@@ -2,7 +2,6 @@ package suppress
import (
"fmt"
- "log"
"net"
"net/url"
@@ -10,8 +9,6 @@ import (
)
func LogAnalyticsClusterUrl(_, old, new string, _ *schema.ResourceData) bool {
- // verify the uri is valid
- log.Printf("[INFO] Suppress Log Analytics Cluster URI: %s", old)
u, err := url.ParseRequestURI(old)
if err != nil || u.Host == "" {
return false
@@ -22,7 +19,6 @@ func LogAnalyticsClusterUrl(_, old, new string, _ *schema.ResourceData) bool {
host = u.Host
}
- log.Printf("[INFO] Suppress Log Analytics Cluster URI: %s == %s", new, fmt.Sprintf("%s://%s/", u.Scheme, host))
if new == fmt.Sprintf("%s://%s/", u.Scheme, host) {
return true
}
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
index 05a101424f4b..45530ef61998 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -25,7 +25,7 @@ func TestAccAzureRMLogAnalyticsCluster_basic(t *testing.T) {
testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("size_gb"), // not returned by the API
},
})
}
@@ -61,14 +61,14 @@ func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("size_gb"), // not returned by the API
{
Config: testAccAzureRMLogAnalyticsCluster_complete(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
- data.ImportStep(),
+ data.ImportStep("size_gb"), // not returned by the API
},
})
}
From a229c2fb12d12f72f283b3d9bff53c07353063bb Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 11 Nov 2020 21:39:00 -0800
Subject: [PATCH 23/46] Fix test lint issue
---
.../loganalytics/tests/log_analytics_cluster_resource_test.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
index 45530ef61998..e7dae8ce4521 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -185,7 +185,7 @@ resource "azurerm_key_vault" "test" {
}
resource "azurerm_key_vault_key" "test" {
- name = "key-%s""
+ name = "key-%s"
key_vault_id = azurerm_key_vault.test.id
key_type = "RSA"
key_size = 2048
From 7fb34d9803901d0d81785075ddfaefe6bd944846 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 11 Nov 2020 21:45:49 -0800
Subject: [PATCH 24/46] Another lint issue
---
.../services/loganalytics/log_analytics_clusters_resource.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
index 4742e739a4e3..72d2487adae6 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
@@ -258,7 +258,7 @@ func resourceArmLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface
// Need to wait for the cluster to actually finish updating the resource before continuing
// since the service returns a 200 instantly while it's still updating in the background
- log.Printf("[INFO] Checking for Log Analytics Cluster provisioning state", id.Name)
+ log.Printf("[INFO] Checking for Log Analytics Cluster provisioning state")
updateWait := &resource.StateChangeConf{
Pending: []string{string(operationalinsights.Updating)},
@@ -274,7 +274,7 @@ func resourceArmLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface
}
if resp.ClusterProperties.ProvisioningState != operationalinsights.Updating && resp.ClusterProperties.ProvisioningState != operationalinsights.Succeeded {
- return nil, "nil", fmt.Errorf("Log Analytics Cluster %q (Resource Group %q) unexpected Provisioning State encounterd: %q", id.Name, id.ResourceGroup, string(resp.ClusterProperties.ProvisioningState))
+ return nil, "nil", fmt.Errorf("Log Analytics Cluster %q (Resource Group %q) unexpected Provisioning State encountered: %q", id.Name, id.ResourceGroup, string(resp.ClusterProperties.ProvisioningState))
}
return resp, string(resp.ClusterProperties.ProvisioningState), nil
From c83f74560bd323f37222d6473558b301d45bd048 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 11 Nov 2020 22:19:28 -0800
Subject: [PATCH 25/46] Add nolint exception for R001
---
.../apimanagement/api_management_custom_domain_resource.go | 1 +
1 file changed, 1 insertion(+)
diff --git a/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go b/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go
index 12215fe9d39b..6a9a9f95fd15 100644
--- a/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go
+++ b/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go
@@ -189,6 +189,7 @@ func apiManagementCustomDomainRead(d *schema.ResourceData, meta interface{}) err
configs := flattenApiManagementHostnameConfiguration(resp.ServiceProperties.HostnameConfigurations, d)
for _, config := range configs {
for key, v := range config.(map[string]interface{}) {
+ // nolint R001: ResourceData.Set() key argument should be string literal
if err := d.Set(key, v); err != nil {
return fmt.Errorf("setting `hostname_configuration` %q: %+v", key, err)
}
From f342d34d1b5cea1dcdd574ded75e7597e6b77c70 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Wed, 11 Nov 2020 22:34:02 -0800
Subject: [PATCH 26/46] Terrafmt
---
.../tests/log_analytics_cluster_resource_test.go | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
index e7dae8ce4521..a8b9b0a5f337 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -204,7 +204,7 @@ resource "azurerm_key_vault_key" "test" {
resource "azurerm_key_vault_access_policy" "subscription" {
key_vault_id = azurerm_key_vault.test.id
-
+
key_permissions = [
"create",
"delete",
@@ -227,8 +227,8 @@ resource "azurerm_key_vault_access_policy" "test" {
key_vault_id = azurerm_key_vault.test.id
key_permissions = [
- "get",
- "unwrapkey",
+ "get",
+ "unwrapkey",
"wrapkey"
]
From 80bbf109a090416c94ae6eb9609c01c15ff309ae Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Thu, 12 Nov 2020 02:24:09 -0800
Subject: [PATCH 27/46] Update test
---
.../log_analytics_cluster_resource_test.go | 117 +++++++++++-------
1 file changed, 69 insertions(+), 48 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
index a8b9b0a5f337..e43e0825fb9b 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -56,7 +56,7 @@ func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMLogAnalyticsCluster_basic(data),
+ Config: testAccAzureRMLogAnalyticsCluster_basicWithKeyVault(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
@@ -130,45 +130,8 @@ resource "azurerm_resource_group" "test" {
`, data.RandomInteger, data.Locations.Primary)
}
-func testAccAzureRMLogAnalyticsCluster_basic(data acceptance.TestData) string {
- template := testAccAzureRMLogAnalyticsCluster_template(data)
- return fmt.Sprintf(`
-%s
-
-resource "azurerm_log_analytics_cluster" "test" {
- name = "acctest-LA-%d"
- resource_group_name = azurerm_resource_group.test.name
- location = azurerm_resource_group.test.location
-
- identity {
- type = "SystemAssigned"
- }
-}
-`, template, data.RandomInteger)
-}
-
-func testAccAzureRMLogAnalyticsCluster_requiresImport(data acceptance.TestData) string {
- config := testAccAzureRMLogAnalyticsCluster_basic(data)
+func testAccAzureRMLogAnalyticsCluster_keyVaultTemplate(data acceptance.TestData) string {
return fmt.Sprintf(`
-%s
-
-resource "azurerm_log_analytics_cluster" "import" {
- name = azurerm_log_analytics_cluster.test.name
- resource_group_name = azurerm_log_analytics_cluster.test.resource_group_name
- location = azurerm_log_analytics_cluster.test.location
-
- identity {
- type = "SystemAssigned"
- }
-}
-`, config)
-}
-
-func testAccAzureRMLogAnalyticsCluster_complete(data acceptance.TestData) string {
- template := testAccAzureRMLogAnalyticsCluster_template(data)
- return fmt.Sprintf(`
-%s
-
data "azurerm_client_config" "current" {}
resource "azurerm_key_vault" "test" {
@@ -232,9 +195,73 @@ resource "azurerm_key_vault_access_policy" "test" {
"wrapkey"
]
- tenant_id = azurerm_log_analytics_cluster.example.identity.0.tenant_id
- object_id = azurerm_log_analytics_cluster.example.identity.0.principal_id
+ tenant_id = azurerm_log_analytics_cluster.test.identity.0.tenant_id
+ object_id = azurerm_log_analytics_cluster.test.identity.0.principal_id
}
+`, data.RandomInteger, data.RandomString)
+}
+
+func testAccAzureRMLogAnalyticsCluster_basic(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsCluster_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_cluster" "test" {
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+
+ identity {
+ type = "SystemAssigned"
+ }
+}
+`, template, data.RandomInteger)
+}
+
+func testAccAzureRMLogAnalyticsCluster_basicWithKeyVault(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsCluster_template(data)
+ keyVaultTemplate := testAccAzureRMLogAnalyticsCluster_keyVaultTemplate(data)
+ return fmt.Sprintf(`
+%s
+
+%s
+
+resource "azurerm_log_analytics_cluster" "test" {
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+
+ identity {
+ type = "SystemAssigned"
+ }
+}
+`, template, keyVaultTemplate, data.RandomInteger)
+}
+
+func testAccAzureRMLogAnalyticsCluster_requiresImport(data acceptance.TestData) string {
+ config := testAccAzureRMLogAnalyticsCluster_basic(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_cluster" "import" {
+ name = azurerm_log_analytics_cluster.test.name
+ resource_group_name = azurerm_log_analytics_cluster.test.resource_group_name
+ location = azurerm_log_analytics_cluster.test.location
+
+ identity {
+ type = "SystemAssigned"
+ }
+}
+`, config)
+}
+
+func testAccAzureRMLogAnalyticsCluster_complete(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsCluster_template(data)
+ keyVaultTemplate := testAccAzureRMLogAnalyticsCluster_keyVaultTemplate(data)
+ return fmt.Sprintf(`
+%s
+
+%s
resource "azurerm_log_analytics_cluster" "test" {
name = "acctest-LA-%d"
@@ -250,12 +277,6 @@ resource "azurerm_log_analytics_cluster" "test" {
key_vault_uri = azurerm_key_vault.test.vault_uri
key_version = azurerm_key_vault_key.test.version
}
-
- size_gb = 1100
-
- tags = {
- ENV = "Test"
- }
}
-`, template, data.RandomInteger, data.RandomString, data.RandomInteger)
+`, template, keyVaultTemplate, data.RandomInteger)
}
From 810edc7fcac3be8a13e69aec262967ebe7db1c0b Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Thu, 12 Nov 2020 13:16:07 -0800
Subject: [PATCH 28/46] Updates per PR comments
---
.../api_management_custom_domain_resource.go | 1 -
.../log_analytics_clusters_resource.go | 33 ++++++++++---------
.../parse/log_analytics_cluster.go | 14 ++++++++
3 files changed, 32 insertions(+), 16 deletions(-)
diff --git a/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go b/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go
index 6a9a9f95fd15..12215fe9d39b 100644
--- a/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go
+++ b/azurerm/internal/services/apimanagement/api_management_custom_domain_resource.go
@@ -189,7 +189,6 @@ func apiManagementCustomDomainRead(d *schema.ResourceData, meta interface{}) err
configs := flattenApiManagementHostnameConfiguration(resp.ServiceProperties.HostnameConfigurations, d)
for _, config := range configs {
for key, v := range config.(map[string]interface{}) {
- // nolint R001: ResourceData.Set() key argument should be string literal
if err := d.Set(key, v); err != nil {
return fmt.Errorf("setting `hostname_configuration` %q: %+v", key, err)
}
diff --git a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
index 72d2487adae6..c0b6c673fa07 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
@@ -131,11 +131,14 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
}
func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface{}) error {
client := meta.(*clients.Client).LogAnalytics.ClusterClient
+ subscriptionId := meta.(*clients.Client).Account.SubscriptionId
ctx, cancel := timeouts.ForCreate(meta.(*clients.Client).StopContext, d)
defer cancel()
name := d.Get("name").(string)
resourceGroup := d.Get("resource_group_name").(string)
+ keyVaultProps := expandArmLogAnalyticsClusterKeyVaultProperties(d.Get("key_vault_property").([]interface{}))
+ id := parse.NewLogAnalyticsClusterId(name, resourceGroup)
existing, err := client.Get(ctx, resourceGroup, name)
if err != nil {
@@ -147,12 +150,6 @@ func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface
return tf.ImportAsExistsError("azurerm_log_analytics_cluster", *existing.ID)
}
- keyVaultProps := expandArmLogAnalyticsClusterKeyVaultProperties(d.Get("key_vault_property").([]interface{}))
-
- if d.IsNewResource() && keyVaultProps != nil {
- return fmt.Errorf("the Log Analytics Cluster %q (Resource Group %q) must be successfully provisioned before it can be configured to support customer managed keys", name, resourceGroup)
- }
-
sku := &operationalinsights.ClusterSku{
Capacity: utils.Int64(int64(d.Get("size_gb").(int))),
Name: operationalinsights.CapacityReservation,
@@ -174,17 +171,18 @@ func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface
return fmt.Errorf("waiting on creating future for Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
}
- resp, err := client.Get(ctx, resourceGroup, name)
+ _, err = client.Get(ctx, resourceGroup, name)
if err != nil {
return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
}
- if resp.ID == nil || *resp.ID == "" {
- return fmt.Errorf("empty or nil ID returned for Log Analytics Cluster %q (Resource Group %q) ID", name, resourceGroup)
- }
+ d.SetId(id.ID(subscriptionId))
- d.SetId(*resp.ID)
- return resourceArmLogAnalyticsClusterRead(d, meta)
+ if keyVaultProps != nil {
+ return resourceArmLogAnalyticsClusterUpdate(d, meta)
+ } else {
+ return resourceArmLogAnalyticsClusterRead(d, meta)
+ }
}
func resourceArmLogAnalyticsClusterRead(d *schema.ResourceData, meta interface{}) error {
@@ -273,11 +271,16 @@ func resourceArmLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface
return nil, "nil", fmt.Errorf("polling for the status of Log Analytics Cluster %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err)
}
- if resp.ClusterProperties.ProvisioningState != operationalinsights.Updating && resp.ClusterProperties.ProvisioningState != operationalinsights.Succeeded {
- return nil, "nil", fmt.Errorf("Log Analytics Cluster %q (Resource Group %q) unexpected Provisioning State encountered: %q", id.Name, id.ResourceGroup, string(resp.ClusterProperties.ProvisioningState))
+ if resp.ClusterProperties != nil {
+ if resp.ClusterProperties.ProvisioningState != operationalinsights.Updating && resp.ClusterProperties.ProvisioningState != operationalinsights.Succeeded {
+ return nil, "nil", fmt.Errorf("Log Analytics Cluster %q (Resource Group %q) unexpected Provisioning State encountered: %q", id.Name, id.ResourceGroup, string(resp.ClusterProperties.ProvisioningState))
+ }
+
+ return resp, string(resp.ClusterProperties.ProvisioningState), nil
}
- return resp, string(resp.ClusterProperties.ProvisioningState), nil
+ // Deliberately not an error: the Get response may be transiently missing ClusterProperties, so report an indeterminate state rather than failing the poll.
+ return resp, "nil", nil
},
}
diff --git a/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go
index b469ec0e5a61..84db7d322792 100644
--- a/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go
+++ b/azurerm/internal/services/loganalytics/parse/log_analytics_cluster.go
@@ -11,6 +11,18 @@ type LogAnalyticsClusterId struct {
Name string
}
+func NewLogAnalyticsClusterId(name, resourceGroup string) LogAnalyticsClusterId {
+ return LogAnalyticsClusterId{
+ ResourceGroup: resourceGroup,
+ Name: name,
+ }
+}
+
+func (id LogAnalyticsClusterId) ID(subscriptionId string) string {
+ fmtString := "/subscriptions/%s/resourceGroups/%s/providers/Microsoft.OperationalInsights/clusters/%s"
+ return fmt.Sprintf(fmtString, subscriptionId, id.ResourceGroup, id.Name)
+}
+
func LogAnalyticsClusterID(input string) (*LogAnalyticsClusterId, error) {
id, err := azure.ParseAzureResourceID(input)
if err != nil {
@@ -20,9 +32,11 @@ func LogAnalyticsClusterID(input string) (*LogAnalyticsClusterId, error) {
logAnalyticsCluster := LogAnalyticsClusterId{
ResourceGroup: id.ResourceGroup,
}
+
if logAnalyticsCluster.Name, err = id.PopSegment("clusters"); err != nil {
return nil, err
}
+
if err := id.ValidateNoEmptySegments(input); err != nil {
return nil, err
}
From fc5d1618c9c294165a6c8a8497ccc7635405899e Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Thu, 12 Nov 2020 13:32:42 -0800
Subject: [PATCH 29/46] Added cluster resize test case
---
.../log_analytics_cluster_resource_test.go | 36 +++++++++++++------
1 file changed, 26 insertions(+), 10 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
index e43e0825fb9b..086ac223e3ff 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -30,7 +30,7 @@ func TestAccAzureRMLogAnalyticsCluster_basic(t *testing.T) {
})
}
-func TestAccAzureRMLogAnalyticsCluster_requiresImport(t *testing.T) {
+func TestAccAzureRMLogAnalyticsCluster_resize(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
@@ -43,12 +43,19 @@ func TestAccAzureRMLogAnalyticsCluster_requiresImport(t *testing.T) {
testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
- data.RequiresImportErrorStep(testAccAzureRMLogAnalyticsCluster_requiresImport),
+ data.ImportStep("size_gb"), // not returned by the API
+ {
+ Config: testAccAzureRMLogAnalyticsCluster_resize(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep("size_gb"), // not returned by the API
},
})
}
-func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
+func TestAccAzureRMLogAnalyticsCluster_requiresImport(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
@@ -56,12 +63,23 @@ func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
Steps: []resource.TestStep{
{
- Config: testAccAzureRMLogAnalyticsCluster_basicWithKeyVault(data),
+ Config: testAccAzureRMLogAnalyticsCluster_basic(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
- data.ImportStep("size_gb"), // not returned by the API
+ data.RequiresImportErrorStep(testAccAzureRMLogAnalyticsCluster_requiresImport),
+ },
+ })
+}
+
+func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
+ resource.ParallelTest(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
+ Steps: []resource.TestStep{
{
Config: testAccAzureRMLogAnalyticsCluster_complete(data),
Check: resource.ComposeTestCheckFunc(
@@ -218,24 +236,22 @@ resource "azurerm_log_analytics_cluster" "test" {
`, template, data.RandomInteger)
}
-func testAccAzureRMLogAnalyticsCluster_basicWithKeyVault(data acceptance.TestData) string {
+func testAccAzureRMLogAnalyticsCluster_resize(data acceptance.TestData) string {
template := testAccAzureRMLogAnalyticsCluster_template(data)
- keyVaultTemplate := testAccAzureRMLogAnalyticsCluster_keyVaultTemplate(data)
return fmt.Sprintf(`
%s
-%s
-
resource "azurerm_log_analytics_cluster" "test" {
name = "acctest-LA-%d"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
+ size_gb = 1100
identity {
type = "SystemAssigned"
}
}
-`, template, keyVaultTemplate, data.RandomInteger)
+`, template, data.RandomInteger)
}
func testAccAzureRMLogAnalyticsCluster_requiresImport(data acceptance.TestData) string {
From 9430a46fa3007d3fab891130b599c111c3d7a76d Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Thu, 12 Nov 2020 13:35:42 -0800
Subject: [PATCH 30/46] Update docs to reflect new behavior
---
website/docs/r/log_analytics_cluster.html.markdown | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/website/docs/r/log_analytics_cluster.html.markdown b/website/docs/r/log_analytics_cluster.html.markdown
index 2759df2d3317..1cc40c8e9172 100644
--- a/website/docs/r/log_analytics_cluster.html.markdown
+++ b/website/docs/r/log_analytics_cluster.html.markdown
@@ -74,7 +74,7 @@ An `key_vault_property` block exports the following:
* `key_version` - (Optional) The version of the key associated with the Log Analytics cluster.
-~> **NOTE:** You must first successfully provision a Log Analytics cluster before you can configure the Log Analytics cluster for Customer-Managed Keys by defining a `key_vault_property` block. Customer-Managed Key capability is regional. Your Azure Key Vault, cluster and linked Log Analytics workspaces must be in the same region, but they can be in different subscriptions.
+~> **NOTE:** Customer-Managed Key capability is regional. Your Azure Key Vault, cluster and linked Log Analytics workspaces must be in the same region, but they can be in different subscriptions.
## Attributes Reference
From c586d0113d7520fddc27c0e2e13558d329c028d1 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Thu, 12 Nov 2020 17:07:15 -0800
Subject: [PATCH 31/46] Add key property check again
---
.../log_analytics_clusters_resource.go | 10 +++----
.../log_analytics_cluster_resource_test.go | 27 +++++++++++++++++++
.../r/log_analytics_cluster.html.markdown | 2 +-
3 files changed, 33 insertions(+), 6 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
index c0b6c673fa07..2092bd27667c 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
@@ -150,6 +150,10 @@ func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface
return tf.ImportAsExistsError("azurerm_log_analytics_cluster", *existing.ID)
}
+ if d.IsNewResource() && keyVaultProps != nil {
+ return fmt.Errorf("the Log Analytics Cluster %q (Resource Group %q) must be successfully provisioned before it can be configured to support customer managed keys", name, resourceGroup)
+ }
+
sku := &operationalinsights.ClusterSku{
Capacity: utils.Int64(int64(d.Get("size_gb").(int))),
Name: operationalinsights.CapacityReservation,
@@ -178,11 +182,7 @@ func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface
d.SetId(id.ID(subscriptionId))
- if keyVaultProps != nil {
- return resourceArmLogAnalyticsClusterUpdate(d, meta)
- } else {
- return resourceArmLogAnalyticsClusterRead(d, meta)
- }
+ return resourceArmLogAnalyticsClusterRead(d, meta)
}
func resourceArmLogAnalyticsClusterRead(d *schema.ResourceData, meta interface{}) error {
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
index 086ac223e3ff..ba8212f86425 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -80,6 +80,13 @@ func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
Providers: acceptance.SupportedProviders,
CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMLogAnalyticsCluster_completePreStep(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep("size_gb"), // not returned by the API
{
Config: testAccAzureRMLogAnalyticsCluster_complete(data),
Check: resource.ComposeTestCheckFunc(
@@ -271,6 +278,26 @@ resource "azurerm_log_analytics_cluster" "import" {
`, config)
}
+func testAccAzureRMLogAnalyticsCluster_completePreStep(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsCluster_template(data)
+ keyVaultTemplate := testAccAzureRMLogAnalyticsCluster_keyVaultTemplate(data)
+ return fmt.Sprintf(`
+%s
+
+%s
+
+resource "azurerm_log_analytics_cluster" "test" {
+ name = "acctest-LA-%d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+
+ identity {
+ type = "SystemAssigned"
+ }
+}
+`, template, keyVaultTemplate, data.RandomInteger)
+}
+
func testAccAzureRMLogAnalyticsCluster_complete(data acceptance.TestData) string {
template := testAccAzureRMLogAnalyticsCluster_template(data)
keyVaultTemplate := testAccAzureRMLogAnalyticsCluster_keyVaultTemplate(data)
diff --git a/website/docs/r/log_analytics_cluster.html.markdown b/website/docs/r/log_analytics_cluster.html.markdown
index 1cc40c8e9172..2759df2d3317 100644
--- a/website/docs/r/log_analytics_cluster.html.markdown
+++ b/website/docs/r/log_analytics_cluster.html.markdown
@@ -74,7 +74,7 @@ An `key_vault_property` block exports the following:
* `key_version` - (Optional) The version of the key associated with the Log Analytics cluster.
-~> **NOTE:** Customer-Managed Key capability is regional. Your Azure Key Vault, cluster and linked Log Analytics workspaces must be in the same region, but they can be in different subscriptions.
+~> **NOTE:** You must first successfully provision a Log Analytics cluster before you can configure the Log Analytics cluster for Customer-Managed Keys by defining a `key_vault_property` block. Customer-Managed Key capability is regional. Your Azure Key Vault, cluster and linked Log Analytics workspaces must be in the same region, but they can be in different subscriptions.
## Attributes Reference
From 1d10ed491e224c7d9b0d6f18cd52b46d0aa7f452 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Thu, 12 Nov 2020 22:17:02 -0800
Subject: [PATCH 32/46] Add test cases for suppress function
---
.../suppress/log_analytics_cluster_test.go | 87 +++++++++++++++++++
1 file changed, 87 insertions(+)
create mode 100644 azurerm/internal/services/loganalytics/suppress/log_analytics_cluster_test.go
diff --git a/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster_test.go b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster_test.go
new file mode 100644
index 000000000000..9d160b7f3767
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/suppress/log_analytics_cluster_test.go
@@ -0,0 +1,87 @@
+package suppress
+
+import "testing"
+
+func TestCaseClusterUrl(t *testing.T) {
+ cases := []struct {
+ Name string
+ ClusterURL string
+ KeyVaultURL string
+ Suppress bool
+ }{
+ {
+ Name: "empty URL",
+ ClusterURL: "",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: false,
+ },
+ {
+ Name: "URL with port and wrong scheme",
+ ClusterURL: "http://flynns.arcade.com:443",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: false,
+ },
+ {
+ Name: "invalid URL scheme",
+ ClusterURL: "https//flynns.arcade.com",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: false,
+ },
+ {
+ Name: "invalid URL character",
+ ClusterURL: "https://flynns^arcade.com/",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: false,
+ },
+ {
+ Name: "invalid URL missing scheme",
+ ClusterURL: "//flynns.arcade.com/",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: false,
+ },
+ {
+ Name: "URL with wrong scheme no port",
+ ClusterURL: "http://flynns.arcade.com",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: false,
+ },
+ {
+ Name: "same URL different case",
+ ClusterURL: "https://Flynns.Arcade.com/",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: false,
+ },
+ {
+ Name: "full URL with username@host/path?query#fragment",
+ ClusterURL: "https://Creator4983@flynns.arcade.com/ENCOM?games=MatrixBlaster#MCP",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: true,
+ },
+ {
+ Name: "full URL with username:password@host/path?query#fragment",
+ ClusterURL: "https://Creator4983:7898@flynns.arcade.com/ENCOM?games=SpaceParanoids&developer=KevinFlynn#MCP",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: true,
+ },
+ {
+ Name: "URL missing path separator",
+ ClusterURL: "https://flynns.arcade.com",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: true,
+ },
+ {
+ Name: "same URL",
+ ClusterURL: "https://flynns.arcade.com/",
+ KeyVaultURL: "https://flynns.arcade.com/",
+ Suppress: true,
+ },
+ }
+
+ for _, tc := range cases {
+ t.Run(tc.Name, func(t *testing.T) {
+ if LogAnalyticsClusterUrl("test", tc.ClusterURL, tc.KeyVaultURL, nil) != tc.Suppress {
+ t.Fatalf("Expected LogAnalyticsClusterUrl to return %t for '%q' == '%q'", tc.Suppress, tc.ClusterURL, tc.KeyVaultURL)
+ }
+ })
+ }
+}
From caf0a7da367f87f2ea2e1aa406d0c3f56df0d5c1 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Fri, 13 Nov 2020 16:42:05 +0000
Subject: [PATCH 33/46] reworked for meta resource for CMK
---
azurerm/helpers/azure/key_vault_child.go | 5 +
.../loganalytics/log_analytics_cluster.go | 47 ++++
...s_cluster_customer_managed_key_resource.go | 213 ++++++++++++++++++
...e.go => log_analytics_cluster_resource.go} | 133 ++---------
.../services/loganalytics/registration.go | 1 +
...ster_customer_managed_key_resource_test.go | 207 +++++++++++++++++
.../log_analytics_cluster_resource_test.go | 154 +------------
website/azurerm.erb | 4 +
.../r/log_analytics_cluster.html.markdown | 18 +-
...cluster_customer_managed_key.html.markdown | 125 ++++++++++
10 files changed, 623 insertions(+), 284 deletions(-)
create mode 100644 azurerm/internal/services/loganalytics/log_analytics_cluster.go
create mode 100644 azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
rename azurerm/internal/services/loganalytics/{log_analytics_clusters_resource.go => log_analytics_cluster_resource.go} (68%)
create mode 100644 azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
create mode 100644 website/docs/r/log_analytics_cluster_customer_managed_key.html.markdown
diff --git a/azurerm/helpers/azure/key_vault_child.go b/azurerm/helpers/azure/key_vault_child.go
index d352eda26aae..9a3283d5f41e 100644
--- a/azurerm/helpers/azure/key_vault_child.go
+++ b/azurerm/helpers/azure/key_vault_child.go
@@ -15,6 +15,11 @@ type KeyVaultChildID struct {
Version string
}
+func NewKeyVaultChildID(keyVaultBaseUrl, childType, name, version string) string {
+ fmtString := "%s%s/%s/%s"
+ return fmt.Sprintf(fmtString, keyVaultBaseUrl, childType, name, version)
+}
+
func ParseKeyVaultChildID(id string) (*KeyVaultChildID, error) {
// example: https://tharvey-keyvault.vault.azure.net/type/bird/fdf067c93bbb4b22bff4d8b7a9a56217
idURL, err := url.ParseRequestURI(id)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/log_analytics_cluster.go
new file mode 100644
index 000000000000..a96bee0535e5
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster.go
@@ -0,0 +1,47 @@
+package loganalytics
+
+import (
+ "context"
+ "fmt"
+ "log"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+)
+
+func logAnalyticsClusterUpdateWaitForState(ctx context.Context, meta interface{}, d *schema.ResourceData, resourceGroup string, clusterName string) *resource.StateChangeConf {
+ return &resource.StateChangeConf{
+ Pending: []string{string(operationalinsights.Updating)},
+ Target: []string{string(operationalinsights.Succeeded)},
+ MinTimeout: 1 * time.Minute,
+ Timeout: d.Timeout(schema.TimeoutUpdate),
+ Refresh: logAnalyticsClusterRefresh(ctx, meta, resourceGroup, clusterName),
+ }
+}
+
+func logAnalyticsClusterRefresh(ctx context.Context, meta interface{}, resourceGroup string, clusterName string) resource.StateRefreshFunc {
+ return func() (interface{}, string, error) {
+ client := meta.(*clients.Client).LogAnalytics.ClusterClient
+
+ log.Printf("[INFO] checking on state of Log Analytics Cluster %q", clusterName)
+
+ resp, err := client.Get(ctx, resourceGroup, clusterName)
+ if err != nil {
+ return nil, "nil", fmt.Errorf("polling for the status of Log Analytics Cluster %q (Resource Group %q): %v", clusterName, resourceGroup, err)
+ }
+
+ if resp.ClusterProperties != nil {
+ if resp.ClusterProperties.ProvisioningState != operationalinsights.Updating && resp.ClusterProperties.ProvisioningState != operationalinsights.Succeeded {
+ return nil, "nil", fmt.Errorf("Log Analytics Cluster %q (Resource Group %q) unexpected Provisioning State encountered: %q", clusterName, resourceGroup, string(resp.ClusterProperties.ProvisioningState))
+ }
+
+ return resp, string(resp.ClusterProperties.ProvisioningState), nil
+ }
+
+	// Deliberately not an error: the Get response may be transiently missing ClusterProperties, so report an indeterminate state rather than failing the poll.
+ return resp, "nil", nil
+ }
+}
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
new file mode 100644
index 000000000000..a26e878f4dfd
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
@@ -0,0 +1,213 @@
+package loganalytics
+
+import (
+ "fmt"
+ "log"
+ "strings"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags"
+ azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+)
+
+func resourceArmLogAnalyticsClusterCustomerManagedKey() *schema.Resource {
+ return &schema.Resource{
+ Create: resourceArmLogAnalyticsClusterCustomerManagedKeyCreate,
+ Read: resourceArmLogAnalyticsClusterCustomerManagedKeyRead,
+ Update: resourceArmLogAnalyticsClusterCustomerManagedKeyUpdate,
+ Delete: resourceArmLogAnalyticsClusterCustomerManagedKeyDelete,
+
+ Timeouts: &schema.ResourceTimeout{
+ Create: schema.DefaultTimeout(6 * time.Hour),
+ Read: schema.DefaultTimeout(5 * time.Minute),
+ Update: schema.DefaultTimeout(6 * time.Hour),
+ Delete: schema.DefaultTimeout(30 * time.Minute),
+ },
+
+ Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error {
+ _, err := parse.LogAnalyticsClusterID(id)
+ return err
+ }),
+
+ Schema: map[string]*schema.Schema{
+ "log_analytics_cluster_id": {
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ ValidateFunc: validate.LogAnalyticsClustersName,
+ },
+
+ "key_vault_key_id": {
+ Type: schema.TypeString,
+ Required: true,
+ ValidateFunc: azure.ValidateKeyVaultChildIdVersionOptional,
+ },
+ },
+ }
+}
+
+func resourceArmLogAnalyticsClusterCustomerManagedKeyCreate(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.ClusterClient
+ ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ clusterIdRaw := d.Get("log_analytics_cluster_id").(string)
+ clusterId, err := parse.LogAnalyticsClusterID(clusterIdRaw)
+ if err != nil {
+ return err
+ }
+
+ resp, err := client.Get(ctx, clusterId.ResourceGroup, clusterId.Name)
+ if err != nil {
+ if utils.ResponseWasNotFound(resp.Response) {
+ return fmt.Errorf("Log Analytics Cluster %q (resource group %q) was not found", clusterId.Name, clusterId.ResourceGroup)
+ }
+ return fmt.Errorf("failed to get details of Log Analytics Cluster %q (resource group %q): %+v", clusterId.Name, clusterId.ResourceGroup, err)
+ }
+ if resp.ClusterProperties != nil && resp.ClusterProperties.KeyVaultProperties != nil {
+ keyProps := *resp.ClusterProperties.KeyVaultProperties
+ if keyProps.KeyName != nil && *keyProps.KeyName != "" {
+ return tf.ImportAsExistsError("azurerm_log_analytics_cluster_customer_managed_key", fmt.Sprintf("%s/CMK", clusterIdRaw))
+ }
+ }
+
+ d.SetId(fmt.Sprintf("%s/CMK", clusterIdRaw))
+ return resourceArmLogAnalyticsClusterCustomerManagedKeyUpdate(d, meta)
+}
+
+func resourceArmLogAnalyticsClusterCustomerManagedKeyUpdate(d *schema.ResourceData, meta interface{}) error {
+ client := meta.(*clients.Client).LogAnalytics.ClusterClient
+ ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
+ defer cancel()
+
+ keyId, err := azure.ParseKeyVaultChildIDVersionOptional(d.Get("key_vault_key_id").(string))
+ if err != nil {
+ return fmt.Errorf("could not parse Key Vault Key ID: %+v", err)
+ }
+
+ clusterId, err := parse.LogAnalyticsClusterID(d.Get("log_analytics_cluster_id").(string))
+ if err != nil {
+ return err
+ }
+
+ clusterPatch := operationalinsights.ClusterPatch{
+ ClusterPatchProperties: &operationalinsights.ClusterPatchProperties{
+ KeyVaultProperties: &operationalinsights.KeyVaultProperties{
+ KeyVaultURI: utils.String(keyId.KeyVaultBaseUrl),
+ KeyName: utils.String(keyId.Name),
+ KeyVersion: utils.String(keyId.Version),
+ },
+ },
+ }
+
+	// Not required: Update is a PATCH operation, so fields omitted from the payload (Sku, Tags) are left untouched by the API.
+ // resp, err := client.Get(ctx, id.ResourceGroup, id.Name)
+ // if err != nil {
+ // if utils.ResponseWasNotFound(resp.Response) {
+ // return fmt.Errorf("Log Analytics Cluster %q (resource group %q) was not found", id.Name, id.ResourceGroup)
+ // }
+ // return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
+ // }
+
+ // if resp.Sku != nil {
+ // clusterPatch.Sku = resp.Sku
+ // }
+ //
+ // if resp.Tags != nil {
+ // clusterPatch.Tags = resp.Tags
+ // }
+
+ if _, err := client.Update(ctx, clusterId.ResourceGroup, clusterId.Name, clusterPatch); err != nil {
+ return fmt.Errorf("updating Log Analytics Cluster %q (Resource Group %q): %+v", clusterId.Name, clusterId.ResourceGroup, err)
+ }
+
+ updateWait := logAnalyticsClusterUpdateWaitForState(ctx, meta, d, clusterId.ResourceGroup, clusterId.Name)
+
+ if _, err := updateWait.WaitForState(); err != nil {
+ return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", clusterId.Name, clusterId.ResourceGroup, err)
+ }
+
+ return resourceArmLogAnalyticsClusterCustomerManagedKeyRead(d, meta)
+}
+
+func resourceArmLogAnalyticsClusterCustomerManagedKeyRead(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).LogAnalytics.ClusterClient
+	ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+	idRaw := strings.TrimSuffix(d.Id(), "/CMK") // TrimSuffix, not TrimRight: TrimRight treats "/CMK" as a cutset and would also strip trailing 'C'/'M'/'K'/'/' runes from the cluster name itself
+
+	id, err := parse.LogAnalyticsClusterID(idRaw)
+	if err != nil {
+		return err
+	}
+
+	resp, err := client.Get(ctx, id.ResourceGroup, id.Name)
+	if err != nil {
+		if utils.ResponseWasNotFound(resp.Response) {
+			log.Printf("[INFO] Log Analytics %q does not exist - removing from state", d.Id())
+			d.SetId("")
+			return nil
+		}
+		return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
+	}
+
+	if props := resp.ClusterProperties; props != nil {
+		if kvProps := props.KeyVaultProperties; kvProps != nil {
+			var keyVaultUri, keyName, keyVersion string
+			if kvProps.KeyVaultURI != nil && *kvProps.KeyVaultURI != "" {
+				keyVaultUri = *kvProps.KeyVaultURI
+			} else {
+				return fmt.Errorf("empty value returned for Key Vault URI")
+			}
+			if kvProps.KeyName != nil && *kvProps.KeyName != "" {
+				keyName = *kvProps.KeyName
+			} else {
+				return fmt.Errorf("empty value returned for Key Vault Key Name")
+			}
+			if kvProps.KeyVersion != nil {
+				keyVersion = *kvProps.KeyVersion
+			}
+			d.Set("key_vault_key_id", azure.NewKeyVaultChildID(keyVaultUri, "keys", keyName, keyVersion))
+		}
+	}
+
+	return tags.FlattenAndSet(d, resp.Tags)
+}
+
+func resourceArmLogAnalyticsClusterCustomerManagedKeyDelete(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*clients.Client).LogAnalytics.ClusterClient
+	ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
+	defer cancel()
+
+	id, err := parse.LogAnalyticsClusterID(d.Get("log_analytics_cluster_id").(string))
+	if err != nil {
+		return err
+	}
+
+	clusterPatch := operationalinsights.ClusterPatch{
+		ClusterPatchProperties: &operationalinsights.ClusterPatchProperties{
+			KeyVaultProperties: &operationalinsights.KeyVaultProperties{
+				KeyVaultURI: nil,
+				KeyName:     nil,
+				KeyVersion:  nil,
+			},
+		},
+	}
+
+	_, err = client.Update(ctx, id.ResourceGroup, id.Name, clusterPatch)
+	if err != nil {
+		return fmt.Errorf("removing Log Analytics Cluster Customer Managed Key from cluster %q (resource group %q): %+v", id.Name, id.ResourceGroup, err) // include the underlying API error instead of discarding it
+	}
+
+	return nil
+}
diff --git a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go
similarity index 68%
rename from azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
rename to azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go
index 2092bd27667c..63486a884e6c 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_clusters_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go
@@ -6,7 +6,6 @@ import (
"time"
"github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
- "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
@@ -14,7 +13,6 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
- "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/suppress"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags"
azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
@@ -82,31 +80,6 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
},
},
- "key_vault_property": {
- Type: schema.TypeList,
- Optional: true,
- MaxItems: 1,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "key_name": {
- Type: schema.TypeString,
- Optional: true,
- },
-
- "key_vault_uri": {
- Type: schema.TypeString,
- Optional: true,
- DiffSuppressFunc: suppress.LogAnalyticsClusterUrl,
- },
-
- "key_version": {
- Type: schema.TypeString,
- Optional: true,
- },
- },
- },
- },
-
// Per the documentation cluster capacity must start at 1000 GB and can go above 3000 GB with an exception by Microsoft
// so I am not limiting the upperbound here by design
// https://docs.microsoft.com/en-us/azure/azure-monitor/platform/manage-cost-storage#log-analytics-dedicated-clusters
@@ -129,6 +102,7 @@ func resourceArmLogAnalyticsCluster() *schema.Resource {
},
}
}
+
func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface{}) error {
client := meta.(*clients.Client).LogAnalytics.ClusterClient
subscriptionId := meta.(*clients.Client).Account.SubscriptionId
@@ -137,23 +111,19 @@ func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface
name := d.Get("name").(string)
resourceGroup := d.Get("resource_group_name").(string)
- keyVaultProps := expandArmLogAnalyticsClusterKeyVaultProperties(d.Get("key_vault_property").([]interface{}))
+
id := parse.NewLogAnalyticsClusterId(name, resourceGroup)
existing, err := client.Get(ctx, resourceGroup, name)
if err != nil {
if !utils.ResponseWasNotFound(existing.Response) {
- return fmt.Errorf("checking for present of existing Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
+ return fmt.Errorf("checking for presence of existing Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
}
}
if existing.ID != nil && *existing.ID != "" {
return tf.ImportAsExistsError("azurerm_log_analytics_cluster", *existing.ID)
}
- if d.IsNewResource() && keyVaultProps != nil {
- return fmt.Errorf("the Log Analytics Cluster %q (Resource Group %q) must be successfully provisioned before it can be configured to support customer managed keys", name, resourceGroup)
- }
-
sku := &operationalinsights.ClusterSku{
Capacity: utils.Int64(int64(d.Get("size_gb").(int))),
Name: operationalinsights.CapacityReservation,
@@ -211,12 +181,16 @@ func resourceArmLogAnalyticsClusterRead(d *schema.ResourceData, meta interface{}
return fmt.Errorf("setting `identity`: %+v", err)
}
if props := resp.ClusterProperties; props != nil {
- if err := d.Set("key_vault_property", flattenArmLogAnalyticsKeyVaultProperties(props.KeyVaultProperties)); err != nil {
- return fmt.Errorf("setting `key_vault_property`: %+v", err)
- }
d.Set("cluster_id", props.ClusterID)
}
- d.Set("size_gb", flattenArmLogAnalyticsClusterSku(resp.Sku))
+
+ capacity := 0
+ if sku := resp.Sku; sku != nil {
+ if sku.Capacity != nil {
+ capacity = int(*sku.Capacity)
+ }
+ }
+ d.Set("size_gb", capacity)
return tags.FlattenAndSet(d, resp.Tags)
}
@@ -231,13 +205,7 @@ func resourceArmLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface
return err
}
- parameters := operationalinsights.ClusterPatch{
- ClusterPatchProperties: &operationalinsights.ClusterPatchProperties{},
- }
-
- if d.HasChange("key_vault_property") {
- parameters.ClusterPatchProperties.KeyVaultProperties = expandArmLogAnalyticsClusterKeyVaultProperties(d.Get("key_vault_property").([]interface{}))
- }
+ parameters := operationalinsights.ClusterPatch{}
if d.HasChange("size_gb") {
parameters.Sku = &operationalinsights.ClusterSku{
@@ -258,31 +226,7 @@ func resourceArmLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface
// since the service returns a 200 instantly while it's still updating in the background
log.Printf("[INFO] Checking for Log Analytics Cluster provisioning state")
- updateWait := &resource.StateChangeConf{
- Pending: []string{string(operationalinsights.Updating)},
- Target: []string{string(operationalinsights.Succeeded)},
- MinTimeout: 1 * time.Minute,
- Timeout: d.Timeout(schema.TimeoutUpdate),
- Refresh: func() (interface{}, string, error) {
- log.Printf("[INFO] checking on state of Log Analytics Cluster %q", id.Name)
-
- resp, err := client.Get(ctx, id.ResourceGroup, id.Name)
- if err != nil {
- return nil, "nil", fmt.Errorf("polling for the status of Log Analytics Cluster %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err)
- }
-
- if resp.ClusterProperties != nil {
- if resp.ClusterProperties.ProvisioningState != operationalinsights.Updating && resp.ClusterProperties.ProvisioningState != operationalinsights.Succeeded {
- return nil, "nil", fmt.Errorf("Log Analytics Cluster %q (Resource Group %q) unexpected Provisioning State encountered: %q", id.Name, id.ResourceGroup, string(resp.ClusterProperties.ProvisioningState))
- }
-
- return resp, string(resp.ClusterProperties.ProvisioningState), nil
- }
-
- // I am not returning an error here as this might have just been a bad get
- return resp, "nil", nil
- },
- }
+ updateWait := logAnalyticsClusterUpdateWaitForState(ctx, meta, d, id.ResourceGroup, id.Name)
if _, err := updateWait.WaitForState(); err != nil {
return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err)
@@ -322,18 +266,6 @@ func expandArmLogAnalyticsClusterIdentity(input []interface{}) *operationalinsig
}
}
-func expandArmLogAnalyticsClusterKeyVaultProperties(input []interface{}) *operationalinsights.KeyVaultProperties {
- if len(input) == 0 {
- return nil
- }
- v := input[0].(map[string]interface{})
- return &operationalinsights.KeyVaultProperties{
- KeyVaultURI: utils.String(v["key_vault_uri"].(string)),
- KeyName: utils.String(v["key_name"].(string)),
- KeyVersion: utils.String(v["key_version"].(string)),
- }
-}
-
func flattenArmLogAnalyticsIdentity(input *operationalinsights.Identity) []interface{} {
if input == nil {
return make([]interface{}, 0)
@@ -359,42 +291,3 @@ func flattenArmLogAnalyticsIdentity(input *operationalinsights.Identity) []inter
},
}
}
-
-func flattenArmLogAnalyticsKeyVaultProperties(input *operationalinsights.KeyVaultProperties) []interface{} {
- if input == nil {
- return make([]interface{}, 0)
- }
-
- var keyName string
- if input.KeyName != nil {
- keyName = *input.KeyName
- }
- var keyVaultUri string
- if input.KeyVaultURI != nil {
- keyVaultUri = *input.KeyVaultURI
- }
- var keyVersion string
- if input.KeyVersion != nil {
- keyVersion = *input.KeyVersion
- }
- return []interface{}{
- map[string]interface{}{
- "key_name": keyName,
- "key_vault_uri": keyVaultUri,
- "key_version": keyVersion,
- },
- }
-}
-
-func flattenArmLogAnalyticsClusterSku(input *operationalinsights.ClusterSku) interface{} {
- if input == nil {
- return nil
- }
-
- var capacity int
- if input.Capacity != nil {
- capacity = int(*input.Capacity)
- }
-
- return []interface{}{capacity}
-}
diff --git a/azurerm/internal/services/loganalytics/registration.go b/azurerm/internal/services/loganalytics/registration.go
index b5a66a91d6c9..c151a47dd346 100644
--- a/azurerm/internal/services/loganalytics/registration.go
+++ b/azurerm/internal/services/loganalytics/registration.go
@@ -28,6 +28,7 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource {
func (r Registration) SupportedResources() map[string]*schema.Resource {
return map[string]*schema.Resource{
"azurerm_log_analytics_cluster": resourceArmLogAnalyticsCluster(),
+ "azurerm_log_analytics_cluster_customer_managed_key": resourceArmLogAnalyticsClusterCustomerManagedKey(),
"azurerm_log_analytics_datasource_windows_event": resourceArmLogAnalyticsDataSourceWindowsEvent(),
"azurerm_log_analytics_datasource_windows_performance_counter": resourceArmLogAnalyticsDataSourceWindowsPerformanceCounter(),
"azurerm_log_analytics_data_export_rule": resourceArmLogAnalyticsDataExport(),
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
new file mode 100644
index 000000000000..4a64fa9e79bf
--- /dev/null
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
@@ -0,0 +1,207 @@
+package tests
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+)
+
+func TestAccAzureRMLogAnalyticsClusterCustomerManagedKey_basic(t *testing.T) {
+ data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster_customer_managed_key", "test")
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { acceptance.PreCheck(t) },
+ Providers: acceptance.SupportedProviders,
+ CheckDestroy: testCheckAzureRMLogAnalyticsClusterCustomerManagedKeyDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccAzureRMLogAnalyticsClusterCustomerManagedKey_complete(data),
+ Check: resource.ComposeTestCheckFunc(
+ testCheckAzureRMLogAnalyticsClusterCustomerManagedKeyExists(data.ResourceName),
+ ),
+ },
+ data.ImportStep(),
+ },
+ })
+}
+
+func testCheckAzureRMLogAnalyticsClusterCustomerManagedKeyExists(resourceName string) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+		client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.ClusterClient
+		ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+		rs, ok := s.RootModule().Resources[resourceName]
+		if !ok {
+			return fmt.Errorf("Log Analytics Cluster Customer Managed Key not found: %s", resourceName)
+		}
+
+		id, err := parse.LogAnalyticsClusterID(strings.TrimSuffix(rs.Primary.ID, "/CMK")) // TrimSuffix, not TrimRight: TrimRight strips any of '/', 'C', 'M', 'K' and would corrupt cluster names ending in those characters
+		if err != nil {
+			return err
+		}
+		resp, err := client.Get(ctx, id.ResourceGroup, id.Name)
+		if err != nil {
+			if !utils.ResponseWasNotFound(resp.Response) {
+				return fmt.Errorf("bad: get on Log Analytics Cluster for CMK: %+v", err)
+			}
+		}
+		if resp.ClusterProperties == nil || resp.ClusterProperties.KeyVaultProperties == nil {
+			return fmt.Errorf("bad: Log Analytics Cluster has no Customer Managed Key Configured")
+		}
+		if resp.ClusterProperties.KeyVaultProperties.KeyVaultURI == nil || *resp.ClusterProperties.KeyVaultProperties.KeyVaultURI == "" {
+			return fmt.Errorf("bad: Log Analytics Cluster Customer Managed Key is not configured")
+		}
+		if resp.ClusterProperties.KeyVaultProperties.KeyName == nil || *resp.ClusterProperties.KeyVaultProperties.KeyName == "" {
+			return fmt.Errorf("bad: Log Analytics Cluster Customer Managed Key is not configured")
+		}
+
+		return nil
+	}
+}
+
+func testCheckAzureRMLogAnalyticsClusterCustomerManagedKeyDestroy(s *terraform.State) error {
+	client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.ClusterClient
+	ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext
+
+	for _, rs := range s.RootModule().Resources {
+		if rs.Type != "azurerm_log_analytics_cluster_customer_managed_key" {
+			continue
+		}
+		id, err := parse.LogAnalyticsClusterID(strings.TrimSuffix(rs.Primary.ID, "/CMK")) // TrimSuffix, not TrimRight: TrimRight strips any of '/', 'C', 'M', 'K' individually
+		if err != nil {
+			return err
+		}
+		resp, err := client.Get(ctx, id.ResourceGroup, id.Name)
+		if err != nil {
+			if !utils.ResponseWasNotFound(resp.Response) {
+				return fmt.Errorf("bad: get on Log Analytics Cluster for CMK: %+v", err)
+			}
+		}
+		if resp.ClusterProperties != nil && resp.ClusterProperties.KeyVaultProperties != nil { // && not ||: with || a nil ClusterProperties panics on the second operand
+			if resp.ClusterProperties.KeyVaultProperties.KeyName != nil && *resp.ClusterProperties.KeyVaultProperties.KeyName != "" {
+				return fmt.Errorf("Bad: Log Analytics Cluster Customer Managed Key %q still present", *resp.ClusterProperties.KeyVaultProperties.KeyName)
+			}
+			if resp.ClusterProperties.KeyVaultProperties.KeyVaultURI != nil && *resp.ClusterProperties.KeyVaultProperties.KeyVaultURI != "" {
+				return fmt.Errorf("Bad: Log Analytics Cluster Customer Managed Key Vault URI %q still present", *resp.ClusterProperties.KeyVaultProperties.KeyVaultURI)
+			}
+			if resp.ClusterProperties.KeyVaultProperties.KeyVersion != nil && *resp.ClusterProperties.KeyVaultProperties.KeyVersion != "" {
+				return fmt.Errorf("Bad: Log Analytics Cluster Customer Managed Key Version %q still present", *resp.ClusterProperties.KeyVaultProperties.KeyVersion)
+			}
+		}
+		return nil
+	}
+	return nil
+}
+
+func testAccAzureRMLogAnalyticsClusterCustomerManagedKey_template(data acceptance.TestData) string {
+ return fmt.Sprintf(`
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "test" {
+ name = "acctestRG-la-%[1]d"
+ location = "%[2]s"
+}
+
+data "azurerm_client_config" "current" {}
+
+resource "azurerm_log_analytics_cluster" "test" {
+ name = "acctest-LA-%[1]d"
+ resource_group_name = azurerm_resource_group.test.name
+ location = azurerm_resource_group.test.location
+
+ identity {
+ type = "SystemAssigned"
+ }
+}
+
+
+resource "azurerm_key_vault" "test" {
+ name = "vault%[1]d"
+ location = azurerm_resource_group.test.location
+ resource_group_name = azurerm_resource_group.test.name
+ tenant_id = data.azurerm_client_config.current.tenant_id
+
+ sku_name = "premium"
+
+ soft_delete_enabled = true
+ soft_delete_retention_days = 7
+ purge_protection_enabled = true
+}
+
+
+resource "azurerm_key_vault_access_policy" "terraform" {
+ key_vault_id = azurerm_key_vault.test.id
+
+ key_permissions = [
+ "create",
+ "delete",
+ "get",
+ "update",
+ "list",
+ ]
+
+ secret_permissions = [
+ "get",
+ "delete",
+ "set",
+ ]
+
+ tenant_id = data.azurerm_client_config.current.tenant_id
+ object_id = data.azurerm_client_config.current.object_id
+}
+
+resource "azurerm_key_vault_key" "test" {
+ name = "key-%[3]s"
+ key_vault_id = azurerm_key_vault.test.id
+ key_type = "RSA"
+ key_size = 2048
+
+ key_opts = [
+ "decrypt",
+ "encrypt",
+ "sign",
+ "unwrapKey",
+ "verify",
+ "wrapKey",
+ ]
+
+ depends_on = [azurerm_key_vault_access_policy.terraform]
+}
+
+resource "azurerm_key_vault_access_policy" "test" {
+ key_vault_id = azurerm_key_vault.test.id
+
+ key_permissions = [
+ "get",
+ "unwrapkey",
+ "wrapkey"
+ ]
+
+ tenant_id = azurerm_log_analytics_cluster.test.identity.0.tenant_id
+ object_id = azurerm_log_analytics_cluster.test.identity.0.principal_id
+}
+`, data.RandomInteger, data.Locations.Primary, data.RandomString)
+}
+
+func testAccAzureRMLogAnalyticsClusterCustomerManagedKey_complete(data acceptance.TestData) string {
+ template := testAccAzureRMLogAnalyticsClusterCustomerManagedKey_template(data)
+ return fmt.Sprintf(`
+%s
+
+resource "azurerm_log_analytics_cluster_customer_managed_key" "test" {
+ cluster_id = azurerm_log_analytics_cluster.test.id
+ key_vault_key_id = azurerm_key_vault_key.test.id
+
+ depends_on = [azurerm_key_vault_access_policy.test]
+}
+
+`, template)
+}
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
index ba8212f86425..fe2743ad8224 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_resource_test.go
@@ -14,7 +14,7 @@ import (
func TestAccAzureRMLogAnalyticsCluster_basic(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
- resource.ParallelTest(t, resource.TestCase{
+ resource.Test(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
@@ -25,14 +25,14 @@ func TestAccAzureRMLogAnalyticsCluster_basic(t *testing.T) {
testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
- data.ImportStep("size_gb"), // not returned by the API
+ data.ImportStep(),
},
})
}
func TestAccAzureRMLogAnalyticsCluster_resize(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
- resource.ParallelTest(t, resource.TestCase{
+ resource.Test(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
@@ -43,21 +43,21 @@ func TestAccAzureRMLogAnalyticsCluster_resize(t *testing.T) {
testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
- data.ImportStep("size_gb"), // not returned by the API
+ data.ImportStep(),
{
Config: testAccAzureRMLogAnalyticsCluster_resize(data),
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
),
},
- data.ImportStep("size_gb"), // not returned by the API
+ data.ImportStep(),
},
})
}
func TestAccAzureRMLogAnalyticsCluster_requiresImport(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
- resource.ParallelTest(t, resource.TestCase{
+ resource.Test(t, resource.TestCase{
PreCheck: func() { acceptance.PreCheck(t) },
Providers: acceptance.SupportedProviders,
CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
@@ -73,31 +73,6 @@ func TestAccAzureRMLogAnalyticsCluster_requiresImport(t *testing.T) {
})
}
-func TestAccAzureRMLogAnalyticsCluster_complete(t *testing.T) {
- data := acceptance.BuildTestData(t, "azurerm_log_analytics_cluster", "test")
- resource.ParallelTest(t, resource.TestCase{
- PreCheck: func() { acceptance.PreCheck(t) },
- Providers: acceptance.SupportedProviders,
- CheckDestroy: testCheckAzureRMLogAnalyticsClusterDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccAzureRMLogAnalyticsCluster_completePreStep(data),
- Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
- ),
- },
- data.ImportStep("size_gb"), // not returned by the API
- {
- Config: testAccAzureRMLogAnalyticsCluster_complete(data),
- Check: resource.ComposeTestCheckFunc(
- testCheckAzureRMLogAnalyticsClusterExists(data.ResourceName),
- ),
- },
- data.ImportStep("size_gb"), // not returned by the API
- },
- })
-}
-
func testCheckAzureRMLogAnalyticsClusterExists(resourceName string) resource.TestCheckFunc {
return func(s *terraform.State) error {
client := acceptance.AzureProvider.Meta().(*clients.Client).LogAnalytics.ClusterClient
@@ -155,77 +130,6 @@ resource "azurerm_resource_group" "test" {
`, data.RandomInteger, data.Locations.Primary)
}
-func testAccAzureRMLogAnalyticsCluster_keyVaultTemplate(data acceptance.TestData) string {
- return fmt.Sprintf(`
-data "azurerm_client_config" "current" {}
-
-resource "azurerm_key_vault" "test" {
- name = "vault%d"
- location = azurerm_resource_group.test.location
- resource_group_name = azurerm_resource_group.test.name
- tenant_id = data.azurerm_client_config.current.tenant_id
-
- sku_name = "premium"
-
- soft_delete_enabled = true
- soft_delete_retention_days = 7
- purge_protection_enabled = true
-}
-
-resource "azurerm_key_vault_key" "test" {
- name = "key-%s"
- key_vault_id = azurerm_key_vault.test.id
- key_type = "RSA"
- key_size = 2048
-
- key_opts = [
- "decrypt",
- "encrypt",
- "sign",
- "unwrapKey",
- "verify",
- "wrapKey",
- ]
-
- depends_on = [azurerm_key_vault_access_policy.subscription]
-}
-
-resource "azurerm_key_vault_access_policy" "subscription" {
- key_vault_id = azurerm_key_vault.test.id
-
- key_permissions = [
- "create",
- "delete",
- "get",
- "update",
- "list",
- ]
-
- secret_permissions = [
- "get",
- "delete",
- "set",
- ]
-
- tenant_id = data.azurerm_client_config.current.tenant_id
- object_id = data.azurerm_client_config.current.object_id
-}
-
-resource "azurerm_key_vault_access_policy" "test" {
- key_vault_id = azurerm_key_vault.test.id
-
- key_permissions = [
- "get",
- "unwrapkey",
- "wrapkey"
- ]
-
- tenant_id = azurerm_log_analytics_cluster.test.identity.0.tenant_id
- object_id = azurerm_log_analytics_cluster.test.identity.0.principal_id
-}
-`, data.RandomInteger, data.RandomString)
-}
-
func testAccAzureRMLogAnalyticsCluster_basic(data acceptance.TestData) string {
template := testAccAzureRMLogAnalyticsCluster_template(data)
return fmt.Sprintf(`
@@ -277,49 +181,3 @@ resource "azurerm_log_analytics_cluster" "import" {
}
`, config)
}
-
-func testAccAzureRMLogAnalyticsCluster_completePreStep(data acceptance.TestData) string {
- template := testAccAzureRMLogAnalyticsCluster_template(data)
- keyVaultTemplate := testAccAzureRMLogAnalyticsCluster_keyVaultTemplate(data)
- return fmt.Sprintf(`
-%s
-
-%s
-
-resource "azurerm_log_analytics_cluster" "test" {
- name = "acctest-LA-%d"
- resource_group_name = azurerm_resource_group.test.name
- location = azurerm_resource_group.test.location
-
- identity {
- type = "SystemAssigned"
- }
-}
-`, template, keyVaultTemplate, data.RandomInteger)
-}
-
-func testAccAzureRMLogAnalyticsCluster_complete(data acceptance.TestData) string {
- template := testAccAzureRMLogAnalyticsCluster_template(data)
- keyVaultTemplate := testAccAzureRMLogAnalyticsCluster_keyVaultTemplate(data)
- return fmt.Sprintf(`
-%s
-
-%s
-
-resource "azurerm_log_analytics_cluster" "test" {
- name = "acctest-LA-%d"
- resource_group_name = azurerm_resource_group.test.name
- location = azurerm_resource_group.test.location
-
- identity {
- type = "SystemAssigned"
- }
-
- key_vault_property {
- key_name = azurerm_key_vault_key.test.name
- key_vault_uri = azurerm_key_vault.test.vault_uri
- key_version = azurerm_key_vault_key.test.version
- }
-}
-`, template, keyVaultTemplate, data.RandomInteger)
-}
diff --git a/website/azurerm.erb b/website/azurerm.erb
index 1f0e4a96d254..f59da6657435 100644
--- a/website/azurerm.erb
+++ b/website/azurerm.erb
@@ -2003,6 +2003,10 @@
azurerm_log_analytics_cluster
+
+ azurerm_log_analytics_cluster_customer_managed_key
+
+
azurerm_log_analytics_data_export_rule
diff --git a/website/docs/r/log_analytics_cluster.html.markdown b/website/docs/r/log_analytics_cluster.html.markdown
index 2759df2d3317..b0beb7a3019c 100644
--- a/website/docs/r/log_analytics_cluster.html.markdown
+++ b/website/docs/r/log_analytics_cluster.html.markdown
@@ -48,8 +48,6 @@ The following arguments are supported:
* `identity` - (Required) A `identity` block as defined below. Changing this forces a new Log Analytics Cluster to be created.
-* `key_vault_property` - (Optional) A `key_vault_property` block as defined below.
-
* `size_gb` - (Optional) The capacity of the Log Analytics Cluster specified in GB/day. Defaults to 1000.
~> **NOTE:** The `size_gb` can be in the range of 1000 to 3000 GB per day and must be in steps of 100 GB. For `size_gb` levels higher than 3000 GB per day, please contact your Microsoft contact to enable it.
@@ -64,18 +62,6 @@ An `identity` block supports the following:
~> **NOTE:** The assigned `principal_id` and `tenant_id` can be retrieved after the identity `type` has been set to `SystemAssigned` and the Log Analytics Cluster has been created. More details are available below.
----
-
-An `key_vault_property` block exports the following:
-
-* `key_name` - (Optional) The name of the key associated with the Log Analytics cluster.
-
-* `key_vault_uri` - (Optional) The Key Vault uri which holds they key associated with the Log Analytics cluster.
-
-* `key_version` - (Optional) The version of the key associated with the Log Analytics cluster.
-
-~> **NOTE:** You must first successfully provision a Log Analytics cluster before you can configure the Log Analytics cluster for Customer-Managed Keys by defining a `key_vault_property` block. Customer-Managed Key capability is regional. Your Azure Key Vault, cluster and linked Log Analytics workspaces must be in the same region, but they can be in different subscriptions.
-
## Attributes Reference
In addition to the Arguments listed above - the following Attributes are exported:
@@ -84,7 +70,7 @@ In addition to the Arguments listed above - the following Attributes are exporte
* `identity` - A `identity` block as defined below.
-* `cluster_id` - The ID of the cluster.
+* `cluster_id` - The GUID of the cluster.
* `type` - The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
@@ -113,4 +99,4 @@ Log Analytics Clusters can be imported using the `resource id`, e.g.
```shell
terraform import azurerm_log_analytics_cluster.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/clusters/cluster1
-```
\ No newline at end of file
+```
diff --git a/website/docs/r/log_analytics_cluster_customer_managed_key.html.markdown b/website/docs/r/log_analytics_cluster_customer_managed_key.html.markdown
new file mode 100644
index 000000000000..6951d4aa1c8c
--- /dev/null
+++ b/website/docs/r/log_analytics_cluster_customer_managed_key.html.markdown
@@ -0,0 +1,125 @@
+---
+subcategory: "Log Analytics"
+layout: "azurerm"
+page_title: "Azure Resource Manager: azurerm_log_analytics_cluster_customer_managed_key"
+description: |-
+ Manages a Log Analytics Cluster Customer Managed Key.
+---
+
+# azurerm_log_analytics_cluster_customer_managed_key
+
+Manages a Log Analytics Cluster Customer Managed Key.
+
+## Example Usage
+
+```hcl
+provider "azurerm" {
+ features {}
+}
+
+resource "azurerm_resource_group" "example" {
+ name = "example-resources"
+ location = "West Europe"
+}
+
+resource "azurerm_log_analytics_cluster" "example" {
+ name = "example-cluster"
+ resource_group_name = azurerm_resource_group.example.name
+ location = azurerm_resource_group.example.location
+
+ identity {
+ type = "SystemAssigned"
+ }
+}
+
+resource "azurerm_key_vault" "example" {
+ name = "keyvaultkeyexample"
+ location = azurerm_resource_group.example.location
+ resource_group_name = azurerm_resource_group.example.name
+ tenant_id = data.azurerm_client_config.current.tenant_id
+
+ sku_name = "premium"
+
+ access_policy {
+ tenant_id = data.azurerm_client_config.current.tenant_id
+ object_id = data.azurerm_client_config.current.object_id
+
+ key_permissions = [
+ "create",
+ "get",
+ ]
+
+ secret_permissions = [
+ "set",
+ ]
+ }
+
+ tags = {
+ environment = "Production"
+ }
+
+ access_policy {
+ tenant_id = azurerm_log_analytics_cluster.example.identity.0.tenant_id
+ object_id = azurerm_log_analytics_cluster.example.identity.0.principal_id
+
+ key_permissions = [
+ "get",
+ "unwrapkey",
+ "wrapkey",
+ ]
+ }
+
+}
+
+resource "azurerm_key_vault_key" "example" {
+ name = "generated-certificate"
+ key_vault_id = azurerm_key_vault.example.id
+ key_type = "RSA"
+ key_size = 2048
+
+ key_opts = [
+ "decrypt",
+ "encrypt",
+ "sign",
+ "unwrapKey",
+ "verify",
+ "wrapKey",
+ ]
+}
+
+resource "azurerm_log_analytics_cluster_customer_managed_key" "example" {
+ log_analytics_cluster_id = azurerm_log_analytics_cluster.example.id
+ key_vault_key_id = azurerm_key_vault_key.example.id
+}
+```
+
+## Arguments Reference
+
+The following arguments are supported:
+
+* `key_vault_key_id` - (Required) The ID of the Key Vault Key to use for encryption.
+
+* `log_analytics_cluster_id` - (Required) The ID of the Log Analytics Cluster. Changing this forces a new Log Analytics Cluster Customer Managed Key to be created.
+
+## Attributes Reference
+
+In addition to the Arguments listed above - the following Attributes are exported:
+
+* `id` - The ID of the Log Analytics Cluster Customer Managed Key.
+
+## Timeouts
+
+The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions:
+
+* `create` - (Defaults to 6 hours) Used when creating the Log Analytics Cluster Customer Managed Key.
+* `read` - (Defaults to 5 minutes) Used when retrieving the Log Analytics Cluster Customer Managed Key.
+* `update` - (Defaults to 6 hours) Used when updating the Log Analytics Cluster Customer Managed Key.
+* `delete` - (Defaults to 30 minutes) Used when deleting the Log Analytics Cluster Customer Managed Key.
+
+## Import
+
+Log Analytics Cluster Customer Managed Keys can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_log_analytics_cluster_customer_managed_key.example /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/clusters/cluster1/CMK
+```
From 03e1690cbfd1d8d9d2d0d0794729b188f1120d9b Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Fri, 13 Nov 2020 17:56:56 +0000
Subject: [PATCH 34/46] fix test config
---
...og_analytics_cluster_customer_managed_key_resource_test.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
index 4a64fa9e79bf..f41522223b2d 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
@@ -197,8 +197,8 @@ func testAccAzureRMLogAnalyticsClusterCustomerManagedKey_complete(data acceptanc
%s
resource "azurerm_log_analytics_cluster_customer_managed_key" "test" {
- cluster_id = azurerm_log_analytics_cluster.test.id
- key_vault_key_id = azurerm_key_vault_key.test.id
+ log_analytics_cluster_id = azurerm_log_analytics_cluster.test.id
+ key_vault_key_id = azurerm_key_vault_key.test.id
depends_on = [azurerm_key_vault_access_policy.test]
}
From 6670164bdd7f07ff50e38555382b567a4a4efd09 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Mon, 16 Nov 2020 07:16:28 +0000
Subject: [PATCH 35/46] fix validation for id in CMK
---
...s_cluster_customer_managed_key_resource.go | 3 ++-
.../validate/log_analytics_cluster.go | 21 +++++++++++++++++++
.../validate/log_analytics_name.go | 4 ++--
3 files changed, 25 insertions(+), 3 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
index a26e878f4dfd..734024d329db 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
@@ -6,6 +6,7 @@ import (
"strings"
"time"
+
"github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
@@ -43,7 +44,7 @@ func resourceArmLogAnalyticsClusterCustomerManagedKey() *schema.Resource {
Type: schema.TypeString,
Required: true,
ForceNew: true,
- ValidateFunc: validate.LogAnalyticsClustersName,
+ ValidateFunc: validate.LogAnalyticsClusterId,
},
"key_vault_key_id": {
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go
index 2ef7ffbb4a00..a1d27573b598 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_cluster.go
@@ -1,5 +1,26 @@
package validate
+import (
+ "fmt"
+
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
+)
+
func LogAnalyticsClustersName(i interface{}, k string) (warnings []string, errors []error) {
return logAnalyticsGenericName(i, k)
}
+
+func LogAnalyticsClusterId(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return
+ }
+
+ _, err := parse.LogAnalyticsClusterID(v)
+ if err != nil {
+		errors = append(errors, fmt.Errorf("expected %s to be a Log Analytics Cluster ID: %+v", k, err))
+ }
+
+ return warnings, errors
+}
diff --git a/azurerm/internal/services/loganalytics/validate/log_analytics_name.go b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
index 390aaa86200a..98f6effcfe28 100644
--- a/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
+++ b/azurerm/internal/services/loganalytics/validate/log_analytics_name.go
@@ -14,11 +14,11 @@ func logAnalyticsGenericName(i interface{}, k string) (warnings []string, errors
return
}
if len(v) < 4 {
- errors = append(errors, fmt.Errorf("length should be greater than %d", 4))
+ errors = append(errors, fmt.Errorf("length should be greater than %d, got %q", 4, v))
return
}
if len(v) > 63 {
- errors = append(errors, fmt.Errorf("length should be less than %d", 63))
+ errors = append(errors, fmt.Errorf("length should be less than %d, got %q", 63, v))
return
}
if !regexp.MustCompile(`^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$`).MatchString(v) {
From 2cbfbdb26cf346f179d3036d120a613d2bb8b678 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Mon, 16 Nov 2020 07:24:44 +0000
Subject: [PATCH 36/46] make fmt
---
.../log_analytics_cluster_customer_managed_key_resource.go | 1 -
1 file changed, 1 deletion(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
index 734024d329db..5ee54877723f 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
@@ -6,7 +6,6 @@ import (
"strings"
"time"
-
"github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
From 4e0b3abbc8138bc4a1178886c1148f5a090b72db Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Mon, 16 Nov 2020 08:06:53 +0000
Subject: [PATCH 37/46] website link typo
---
website/azurerm.erb | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/website/azurerm.erb b/website/azurerm.erb
index f59da6657435..1cce10f2e1b7 100644
--- a/website/azurerm.erb
+++ b/website/azurerm.erb
@@ -2004,7 +2004,7 @@
- azurerm_log_analytics_cluster_customer_managed_key
+ azurerm_log_analytics_cluster_customer_managed_key
From c5e51cef875a83c8120434ca6ed406bda15c0fe3 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Mon, 16 Nov 2020 08:13:27 +0000
Subject: [PATCH 38/46] fix panic
---
.../log_analytics_cluster_customer_managed_key_resource.go | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
index 5ee54877723f..75dfcd1be4df 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
@@ -13,7 +13,6 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
- "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags"
azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
@@ -181,7 +180,7 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyRead(d *schema.ResourceData
}
}
- return tags.FlattenAndSet(d, resp.Tags)
+ return nil
}
func resourceArmLogAnalyticsClusterCustomerManagedKeyDelete(d *schema.ResourceData, meta interface{}) error {
From 4a4a1411e99cea9b53736a37e4df3c00199d59b2 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Mon, 16 Nov 2020 10:41:27 +0000
Subject: [PATCH 39/46] rationalise for when API response contains keyvault tcp
port
---
azurerm/helpers/azure/key_vault_child.go | 15 ++++--
azurerm/helpers/azure/key_vault_child_test.go | 47 ++++++++++++++++++-
...s_cluster_customer_managed_key_resource.go | 23 ++-------
3 files changed, 63 insertions(+), 22 deletions(-)
diff --git a/azurerm/helpers/azure/key_vault_child.go b/azurerm/helpers/azure/key_vault_child.go
index 9a3283d5f41e..59911399c18c 100644
--- a/azurerm/helpers/azure/key_vault_child.go
+++ b/azurerm/helpers/azure/key_vault_child.go
@@ -15,9 +15,18 @@ type KeyVaultChildID struct {
Version string
}
-func NewKeyVaultChildID(keyVaultBaseUrl, childType, name, version string) string {
- fmtString := "%s%s/%s/%s"
- return fmt.Sprintf(fmtString, keyVaultBaseUrl, childType, name, version)
+func NewKeyVaultChildResourceID(keyVaultBaseUrl, childType, name, version string) (string, error) {
+ fmtString := "%s/%s/%s/%s"
+ keyVaultUrl, err := url.Parse(keyVaultBaseUrl)
+ if err != nil || keyVaultBaseUrl == "" {
+ return "", fmt.Errorf("failed to parse Key Vault Base URL %q: %+v", keyVaultBaseUrl, err)
+ }
+ // (@jackofallops) - Log Analytics service adds the port number to the API returns, so we strip it here
+ if hostParts := strings.Split(keyVaultUrl.Host, ":"); len(hostParts) > 1 {
+ keyVaultUrl.Host = hostParts[0]
+ }
+
+ return fmt.Sprintf(fmtString, keyVaultUrl.String(), childType, name, version), nil
}
func ParseKeyVaultChildID(id string) (*KeyVaultChildID, error) {
diff --git a/azurerm/helpers/azure/key_vault_child_test.go b/azurerm/helpers/azure/key_vault_child_test.go
index 2e5951cc7659..0fdbce778ad0 100644
--- a/azurerm/helpers/azure/key_vault_child_test.go
+++ b/azurerm/helpers/azure/key_vault_child_test.go
@@ -1,6 +1,8 @@
package azure
-import "testing"
+import (
+ "testing"
+)
func TestAccAzureRMValidateKeyVaultChildID(t *testing.T) {
cases := []struct {
@@ -320,3 +322,46 @@ func TestAccAzureRMKeyVaultChild_validateName(t *testing.T) {
}
}
}
+
+func TestNewKeyVaultChildResourceID(t *testing.T) {
+ childType := "keys"
+ childName := "test"
+ childVersion := "testVersionString"
+ cases := []struct {
+ Scenario string
+ keyVaultBaseUrl string
+ Expected string
+ ExpectError bool
+ }{
+ {
+ Scenario: "empty values",
+ keyVaultBaseUrl: "",
+ Expected: "",
+ ExpectError: true,
+ },
+ {
+ Scenario: "valid, no port",
+ keyVaultBaseUrl: "https://test.vault.azure.net",
+ Expected: "https://test.vault.azure.net/keys/test/testVersionString",
+ ExpectError: false,
+ },
+ {
+ Scenario: "valid, with port",
+ keyVaultBaseUrl: "https://test.vault.azure.net:443",
+ Expected: "https://test.vault.azure.net/keys/test/testVersionString",
+ ExpectError: false,
+ },
+ }
+ for _, tc := range cases {
+ id, err := NewKeyVaultChildResourceID(tc.keyVaultBaseUrl, childType, childName, childVersion)
+ if err != nil {
+ if !tc.ExpectError {
+ t.Fatalf("Got error for New Resource ID '%s': %+v", tc.keyVaultBaseUrl, err)
+ return
+ }
+ }
+ if id != tc.Expected {
+ t.Fatalf("Expected id for %q to be %q, got %q", tc.keyVaultBaseUrl, tc.Expected, id)
+ }
+ }
+}
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
index 75dfcd1be4df..909789d5e2bc 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
@@ -108,23 +108,6 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyUpdate(d *schema.ResourceDa
},
}
- // Shouldn't need this, it's a Patch operation...
- // resp, err := client.Get(ctx, id.ResourceGroup, id.Name)
- // if err != nil {
- // if utils.ResponseWasNotFound(resp.Response) {
- // return fmt.Errorf("Log Analytics Cluster %q (resource group %q) was not found", id.Name, id.ResourceGroup)
- // }
- // return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
- // }
-
- // if resp.Sku != nil {
- // clusterPatch.Sku = resp.Sku
- // }
- //
- // if resp.Tags != nil {
- // clusterPatch.Tags = resp.Tags
- // }
-
if _, err := client.Update(ctx, clusterId.ResourceGroup, clusterId.Name, clusterPatch); err != nil {
return fmt.Errorf("updating Log Analytics Cluster %q (Resource Group %q): %+v", clusterId.Name, clusterId.ResourceGroup, err)
}
@@ -176,7 +159,11 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyRead(d *schema.ResourceData
if kvProps.KeyVersion != nil {
keyVersion = *kvProps.KeyVersion
}
- d.Set("key_vault_key_id", azure.NewKeyVaultChildID(keyVaultUri, "keys", keyName, keyVersion))
+ keyVaultKeyId, err := azure.NewKeyVaultChildResourceID(keyVaultUri, "keys", keyName, keyVersion)
+ if err != nil {
+ return err
+ }
+ d.Set("key_vault_key_id", keyVaultKeyId)
}
}
From 00980c91b72cc0d43888aca28b5fcff635847339 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Mon, 16 Nov 2020 13:42:02 +0000
Subject: [PATCH 40/46] import passthrough for CMK meta resource
---
...log_analytics_cluster_customer_managed_key_resource.go | 8 +++-----
1 file changed, 3 insertions(+), 5 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
index 909789d5e2bc..2711d0f43bf1 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
@@ -13,7 +13,6 @@ import (
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
- azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)
@@ -32,10 +31,9 @@ func resourceArmLogAnalyticsClusterCustomerManagedKey() *schema.Resource {
Delete: schema.DefaultTimeout(30 * time.Minute),
},
- Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error {
- _, err := parse.LogAnalyticsClusterID(id)
- return err
- }),
+ Importer: &schema.ResourceImporter{
+ State: schema.ImportStatePassthrough,
+ },
Schema: map[string]*schema.Schema{
"log_analytics_cluster_id": {
From 3afc65beec7def5105c3f58f3de5ac5195a496fa Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Mon, 16 Nov 2020 15:03:37 +0000
Subject: [PATCH 41/46] LRO wait in create, missed setting cluster_id in read
for import
---
...log_analytics_cluster_customer_managed_key_resource.go | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
index 2711d0f43bf1..bdbfb53eaaed 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
@@ -77,6 +77,12 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyCreate(d *schema.ResourceDa
}
}
+ updateWait := logAnalyticsClusterUpdateWaitForState(ctx, meta, d, clusterId.ResourceGroup, clusterId.Name)
+
+ if _, err := updateWait.WaitForState(); err != nil {
+ return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", clusterId.Name, clusterId.ResourceGroup, err)
+ }
+
d.SetId(fmt.Sprintf("%s/CMK", clusterIdRaw))
return resourceArmLogAnalyticsClusterCustomerManagedKeyUpdate(d, meta)
}
@@ -131,6 +137,8 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyRead(d *schema.ResourceData
return err
}
+ d.Set("log_analytics_cluster_id", idRaw)
+
resp, err := client.Get(ctx, id.ResourceGroup, id.Name)
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
From 4eba6d5cb2c7be5d0845dc210c7292f8a9f98498 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Mon, 16 Nov 2020 16:19:48 +0000
Subject: [PATCH 42/46] refactor poller for multiple timeouts
---
.../loganalytics/log_analytics_cluster.go | 5 ++---
...s_cluster_customer_managed_key_resource.go | 20 +++++++++----------
.../log_analytics_cluster_resource.go | 9 ++++++++-
.../r/log_analytics_cluster.html.markdown | 3 ++-
...cluster_customer_managed_key.html.markdown | 4 ++--
5 files changed, 24 insertions(+), 17 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster.go b/azurerm/internal/services/loganalytics/log_analytics_cluster.go
index a96bee0535e5..fc5e6d21fae4 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster.go
@@ -8,16 +8,15 @@ import (
"github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
- "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
)
-func logAnalyticsClusterUpdateWaitForState(ctx context.Context, meta interface{}, d *schema.ResourceData, resourceGroup string, clusterName string) *resource.StateChangeConf {
+func logAnalyticsClusterWaitForState(ctx context.Context, meta interface{}, timeout time.Duration, resourceGroup string, clusterName string) *resource.StateChangeConf {
return &resource.StateChangeConf{
Pending: []string{string(operationalinsights.Updating)},
Target: []string{string(operationalinsights.Succeeded)},
MinTimeout: 1 * time.Minute,
- Timeout: d.Timeout(schema.TimeoutUpdate),
+ Timeout: timeout,
Refresh: logAnalyticsClusterRefresh(ctx, meta, resourceGroup, clusterName),
}
}
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
index bdbfb53eaaed..6a868215f3d6 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
@@ -77,12 +77,6 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyCreate(d *schema.ResourceDa
}
}
- updateWait := logAnalyticsClusterUpdateWaitForState(ctx, meta, d, clusterId.ResourceGroup, clusterId.Name)
-
- if _, err := updateWait.WaitForState(); err != nil {
- return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", clusterId.Name, clusterId.ResourceGroup, err)
- }
-
d.SetId(fmt.Sprintf("%s/CMK", clusterIdRaw))
return resourceArmLogAnalyticsClusterCustomerManagedKeyUpdate(d, meta)
}
@@ -116,7 +110,7 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyUpdate(d *schema.ResourceDa
return fmt.Errorf("updating Log Analytics Cluster %q (Resource Group %q): %+v", clusterId.Name, clusterId.ResourceGroup, err)
}
- updateWait := logAnalyticsClusterUpdateWaitForState(ctx, meta, d, clusterId.ResourceGroup, clusterId.Name)
+ updateWait := logAnalyticsClusterWaitForState(ctx, meta, d.Timeout(schema.TimeoutUpdate), clusterId.ResourceGroup, clusterId.Name)
if _, err := updateWait.WaitForState(); err != nil {
return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", clusterId.Name, clusterId.ResourceGroup, err)
@@ -181,7 +175,7 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyDelete(d *schema.ResourceDa
ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
defer cancel()
- id, err := parse.LogAnalyticsClusterID(d.Get("log_analytics_cluster_id").(string))
+ clusterId, err := parse.LogAnalyticsClusterID(d.Get("log_analytics_cluster_id").(string))
if err != nil {
return err
}
@@ -196,9 +190,15 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyDelete(d *schema.ResourceDa
},
}
- _, err = client.Update(ctx, id.ResourceGroup, id.Name, clusterPatch)
+ _, err = client.Update(ctx, clusterId.ResourceGroup, clusterId.Name, clusterPatch)
if err != nil {
- return fmt.Errorf("removing Log Analytics Cluster Customer Managed Key from cluster %q (resource group %q)", id.Name, id.ResourceGroup)
+ return fmt.Errorf("removing Log Analytics Cluster Customer Managed Key from cluster %q (resource group %q)", clusterId.Name, clusterId.ResourceGroup)
+ }
+
+ deleteWait := logAnalyticsClusterWaitForState(ctx, meta, d.Timeout(schema.TimeoutDelete), clusterId.ResourceGroup, clusterId.Name)
+
+ if _, err := deleteWait.WaitForState(); err != nil {
+ return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", clusterId.Name, clusterId.ResourceGroup, err)
}
return nil
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go
index 63486a884e6c..5a21eca3555f 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go
@@ -150,6 +150,12 @@ func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface
return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
}
+ createWait := logAnalyticsClusterWaitForState(ctx, meta, d.Timeout(schema.TimeoutCreate), id.ResourceGroup, id.Name)
+
+ if _, err := createWait.WaitForState(); err != nil {
+ return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err)
+ }
+
d.SetId(id.ID(subscriptionId))
return resourceArmLogAnalyticsClusterRead(d, meta)
@@ -226,7 +232,7 @@ func resourceArmLogAnalyticsClusterUpdate(d *schema.ResourceData, meta interface
// since the service returns a 200 instantly while it's still updating in the background
log.Printf("[INFO] Checking for Log Analytics Cluster provisioning state")
- updateWait := logAnalyticsClusterUpdateWaitForState(ctx, meta, d, id.ResourceGroup, id.Name)
+ updateWait := logAnalyticsClusterWaitForState(ctx, meta, d.Timeout(schema.TimeoutUpdate), id.ResourceGroup, id.Name)
if _, err := updateWait.WaitForState(); err != nil {
return fmt.Errorf("waiting for Log Analytics Cluster to finish updating %q (Resource Group %q): %v", id.Name, id.ResourceGroup, err)
@@ -253,6 +259,7 @@ func resourceArmLogAnalyticsClusterDelete(d *schema.ResourceData, meta interface
if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
return fmt.Errorf("waiting on deleting future for Log Analytics Cluster %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err)
}
+
return nil
}
diff --git a/website/docs/r/log_analytics_cluster.html.markdown b/website/docs/r/log_analytics_cluster.html.markdown
index b0beb7a3019c..d46c3a687e95 100644
--- a/website/docs/r/log_analytics_cluster.html.markdown
+++ b/website/docs/r/log_analytics_cluster.html.markdown
@@ -8,8 +8,9 @@ description: |-
# azurerm_log_analytics_cluster
+!> **Important** Due to capacity constraints, Microsoft requires you to pre-register your subscription IDs before you are allowed to create a Log Analytics cluster. Contact Microsoft, or open a support request to register your subscription IDs.
-~> **Important** Due to capacity constraints, Microsoft requires you to pre-register your subscription IDs before you are allowed to create a Log Analytics cluster. Contact Microsoft, or open a support request to register your subscription IDs.
+~> **Note:** Log Analytics Clusters are subject to a 14-day soft delete policy. Clusters created with the same resource group & name as a previously deleted cluster will be recovered rather than created anew.
Manages a Log Analytics Cluster.
diff --git a/website/docs/r/log_analytics_cluster_customer_managed_key.html.markdown b/website/docs/r/log_analytics_cluster_customer_managed_key.html.markdown
index 6951d4aa1c8c..e6efd6e41d1c 100644
--- a/website/docs/r/log_analytics_cluster_customer_managed_key.html.markdown
+++ b/website/docs/r/log_analytics_cluster_customer_managed_key.html.markdown
@@ -99,13 +99,13 @@ The following arguments are supported:
* `key_vault_key_id` - (Required) The ID of the Key Vault Key to use for encryption.
-* `log_analytics_cluster_id` - (Required) The ID of the Log Analytics Cluster. Changing this forces a new Log Analytics Cluster Custoemr Managed Key to be created.
+* `log_analytics_cluster_id` - (Required) The ID of the Log Analytics Cluster. Changing this forces a new Log Analytics Cluster Customer Managed Key to be created.
## Attributes Reference
In addition to the Arguments listed above - the following Attributes are exported:
-* `id` - The ID of the Log Analytics Cluster Custoemr Managed Key.
+* `id` - The ID of the Log Analytics Cluster Customer Managed Key.
## Timeouts
From be477dee0d760afbff3b76ee8a430407f901b3f5 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Mon, 16 Nov 2020 17:31:57 +0000
Subject: [PATCH 43/46] fix crash in checkDestroy
---
.../log_analytics_cluster_customer_managed_key_resource_test.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
index f41522223b2d..3493dc0ae2fe 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
@@ -83,7 +83,7 @@ func testCheckAzureRMLogAnalyticsClusterCustomerManagedKeyDestroy(s *terraform.S
return fmt.Errorf("bad: get on Log Analytics Cluster for CMK: %+v", err)
}
}
- if resp.ClusterProperties != nil || resp.ClusterProperties.KeyVaultProperties != nil {
+ if resp.ClusterProperties != nil && resp.ClusterProperties.KeyVaultProperties != nil {
if resp.ClusterProperties.KeyVaultProperties.KeyName != nil || *resp.ClusterProperties.KeyVaultProperties.KeyName != "" {
return fmt.Errorf("Bad: Log Analytics CLuster Customer Managed Key %q still present", *resp.ClusterProperties.KeyVaultProperties.KeyName)
}
From 06952dfd662844252ab376bb40f655a8344904d6 Mon Sep 17 00:00:00 2001
From: Jeffrey Cline <20408400+WodansSon@users.noreply.github.com>
Date: Mon, 16 Nov 2020 22:15:21 -0800
Subject: [PATCH 44/46] Update CustomerManagedKey test case
---
...og_analytics_cluster_customer_managed_key_resource_test.go | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
index 3493dc0ae2fe..7e5ba44c7d2b 100644
--- a/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
+++ b/azurerm/internal/services/loganalytics/tests/log_analytics_cluster_customer_managed_key_resource_test.go
@@ -124,7 +124,7 @@ resource "azurerm_log_analytics_cluster" "test" {
resource "azurerm_key_vault" "test" {
- name = "vault%[1]d"
+ name = "vault%[3]s"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
tenant_id = data.azurerm_client_config.current.tenant_id
@@ -187,6 +187,8 @@ resource "azurerm_key_vault_access_policy" "test" {
tenant_id = azurerm_log_analytics_cluster.test.identity.0.tenant_id
object_id = azurerm_log_analytics_cluster.test.identity.0.principal_id
+
+ depends_on = [azurerm_key_vault_access_policy.terraform]
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString)
}
From deadd59ee7c925d53ee0c64f6f35c5a90e68597f Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Tue, 17 Nov 2020 07:31:04 +0000
Subject: [PATCH 45/46] review changes
---
...ytics_cluster_customer_managed_key_resource.go | 3 +--
.../log_analytics_cluster_resource.go | 15 ++++++---------
2 files changed, 7 insertions(+), 11 deletions(-)
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
index 6a868215f3d6..def010bce375 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_customer_managed_key_resource.go
@@ -190,8 +190,7 @@ func resourceArmLogAnalyticsClusterCustomerManagedKeyDelete(d *schema.ResourceDa
},
}
- _, err = client.Update(ctx, clusterId.ResourceGroup, clusterId.Name, clusterPatch)
- if err != nil {
+ if _, err = client.Update(ctx, clusterId.ResourceGroup, clusterId.Name, clusterPatch); err != nil {
return fmt.Errorf("removing Log Analytics Cluster Customer Managed Key from cluster %q (resource group %q)", clusterId.Name, clusterId.ResourceGroup)
}
diff --git a/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go
index 5a21eca3555f..36a7d3c4361e 100644
--- a/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go
+++ b/azurerm/internal/services/loganalytics/log_analytics_cluster_resource.go
@@ -124,16 +124,14 @@ func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface
return tf.ImportAsExistsError("azurerm_log_analytics_cluster", *existing.ID)
}
- sku := &operationalinsights.ClusterSku{
- Capacity: utils.Int64(int64(d.Get("size_gb").(int))),
- Name: operationalinsights.CapacityReservation,
- }
-
parameters := operationalinsights.Cluster{
Location: utils.String(location.Normalize(d.Get("location").(string))),
Identity: expandArmLogAnalyticsClusterIdentity(d.Get("identity").([]interface{})),
- Sku: sku,
- Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
+ Sku: &operationalinsights.ClusterSku{
+ Capacity: utils.Int64(int64(d.Get("size_gb").(int))),
+ Name: operationalinsights.CapacityReservation,
+ },
+ Tags: tags.Expand(d.Get("tags").(map[string]interface{})),
}
future, err := client.CreateOrUpdate(ctx, resourceGroup, name, parameters)
@@ -145,8 +143,7 @@ func resourceArmLogAnalyticsClusterCreate(d *schema.ResourceData, meta interface
return fmt.Errorf("waiting on creating future for Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
}
- _, err = client.Get(ctx, resourceGroup, name)
- if err != nil {
+ if _, err = client.Get(ctx, resourceGroup, name); err != nil {
return fmt.Errorf("retrieving Log Analytics Cluster %q (Resource Group %q): %+v", name, resourceGroup, err)
}
From c93f10fa3a8600b7a94816247205a34c004f78c1 Mon Sep 17 00:00:00 2001
From: jackofallops
Date: Tue, 17 Nov 2020 07:51:44 +0000
Subject: [PATCH 46/46] update TC config for loganalytics tests
---
.teamcity/components/settings.kt | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/.teamcity/components/settings.kt b/.teamcity/components/settings.kt
index b32bbe5c68cc..139c4ea72b79 100644
--- a/.teamcity/components/settings.kt
+++ b/.teamcity/components/settings.kt
@@ -25,9 +25,12 @@ var serviceTestConfigurationOverrides = mapOf(
// Data Lake has a low quota
"datalake" to testConfiguration(2, defaultStartHour),
- //HSM has low quota and potentially slow recycle time
+ // HSM has low quota and potentially slow recycle time
"hsm" to testConfiguration(1, defaultStartHour),
+ // Log Analytics Clusters have a maximum of 2 deployments - parallelism set to 1 or `importTest` fails
+ "loganalytics" to testConfiguration(1, defaultStartHour),
+
// servicebus quotas are limited and we experience failures if tests
// execute too quickly as we run out of namespaces in the sub
"servicebus" to testConfiguration(10, defaultStartHour),