Add support for Log Analytics Data Export (#8995)
* Initial checkin of data export
* Fix lint errors
* Update website/docs/r/log_analytics_data_export.html.markdown
  Co-authored-by: kt <[email protected]>
* Update azurerm/internal/services/loganalytics/tests/log_analytics_data_export_resource_test.go
  Co-authored-by: kt <[email protected]>
* Update azurerm/internal/services/loganalytics/log_analytics_data_export_resource.go
  Co-authored-by: kt <[email protected]>
* Updates per PR comments
* Fix tableNames var name

Co-authored-by: kt <[email protected]>
Showing 11 changed files with 788 additions and 2 deletions.
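Of the eleven changed files, only three are rendered in full below (the resource, its ID parser, and the parser's tests). For orientation, the sketch that follows shows how the new resource would typically be surfaced through the service package's registration. This is a hypothetical illustration assuming the provider's usual Registration pattern, not the commit's actual registration.go change; existing entries and the other Registration methods are omitted.

package loganalytics

import "github.com/hashicorp/terraform-plugin-sdk/helper/schema"

type Registration struct{}

// SupportedResources returns the resources supported by this service package.
func (r Registration) SupportedResources() map[string]*schema.Resource {
    return map[string]*schema.Resource{
        // New in this commit: the resource type name matches the one used by
        // tf.ImportAsExistsError in the resource implementation below.
        "azurerm_log_analytics_data_export_rule": resourceArmLogAnalyticsDataExport(),
    }
}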
azurerm/internal/services/loganalytics/log_analytics_data_export_resource.go (195 additions, 0 deletions)
@@ -0,0 +1,195 @@
package loganalytics

import (
    "fmt"
    "log"
    "time"

    "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
    "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
    "github.com/hashicorp/terraform-plugin-sdk/helper/validation"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/suppress"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/validate"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

// resourceArmLogAnalyticsDataExport defines the schema and CRUD wiring for the
// azurerm_log_analytics_data_export_rule resource.
func resourceArmLogAnalyticsDataExport() *schema.Resource {
    return &schema.Resource{
        Create: resourceArmOperationalinsightsDataExportCreateUpdate,
        Read:   resourceArmOperationalinsightsDataExportRead,
        Update: resourceArmOperationalinsightsDataExportCreateUpdate,
        Delete: resourceArmOperationalinsightsDataExportDelete,
        Importer: &schema.ResourceImporter{
            State: schema.ImportStatePassthrough,
        },

        Timeouts: &schema.ResourceTimeout{
            Create: schema.DefaultTimeout(30 * time.Minute),
            Read:   schema.DefaultTimeout(5 * time.Minute),
            Update: schema.DefaultTimeout(30 * time.Minute),
            Delete: schema.DefaultTimeout(30 * time.Minute),
        },

        Schema: map[string]*schema.Schema{
            "name": {
                Type:             schema.TypeString,
                Required:         true,
                ForceNew:         true,
                DiffSuppressFunc: suppress.CaseDifference,
                ValidateFunc:     validate.LogAnalyticsDataExportName,
            },

            "resource_group_name": azure.SchemaResourceGroupName(),

            "workspace_resource_id": {
                Type:         schema.TypeString,
                Required:     true,
                ForceNew:     true,
                ValidateFunc: azure.ValidateResourceID,
            },

            "destination_resource_id": {
                Type:         schema.TypeString,
                Required:     true,
                ValidateFunc: azure.ValidateResourceID,
            },

            "enabled": {
                Type:     schema.TypeBool,
                Optional: true,
                Default:  false,
            },

            "table_names": {
                Type:     schema.TypeSet,
                Required: true,
                MinItems: 1,
                Elem: &schema.Schema{
                    Type:         schema.TypeString,
                    ValidateFunc: validation.NoZeroValues,
                },
            },

            "export_rule_id": {
                Type:     schema.TypeString,
                Computed: true,
            },
        },
    }
}

func resourceArmOperationalinsightsDataExportCreateUpdate(d *schema.ResourceData, meta interface{}) error {
    client := meta.(*clients.Client).LogAnalytics.DataExportClient
    ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
    defer cancel()

    name := d.Get("name").(string)
    resourceGroup := d.Get("resource_group_name").(string)
    workspace, err := parse.LogAnalyticsWorkspaceID(d.Get("workspace_resource_id").(string))
    if err != nil {
        return err
    }

    // For new resources, surface an import error if a rule with this name already exists.
    if d.IsNewResource() {
        existing, err := client.Get(ctx, resourceGroup, workspace.Name, name)
        if err != nil {
            if !utils.ResponseWasNotFound(existing.Response) {
                return fmt.Errorf("checking for presence of existing Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
            }
        }
        if existing.ID != nil && *existing.ID != "" {
            return tf.ImportAsExistsError("azurerm_log_analytics_data_export_rule", *existing.ID)
        }
    }

    parameters := operationalinsights.DataExport{
        DataExportProperties: &operationalinsights.DataExportProperties{
            Destination: &operationalinsights.Destination{
                ResourceID: utils.String(d.Get("destination_resource_id").(string)),
            },
            TableNames: utils.ExpandStringSlice(d.Get("table_names").(*schema.Set).List()),
            Enable:     utils.Bool(d.Get("enabled").(bool)),
        },
    }

    if _, err := client.CreateOrUpdate(ctx, resourceGroup, workspace.Name, name, parameters); err != nil {
        return fmt.Errorf("creating/updating Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
    }

    resp, err := client.Get(ctx, resourceGroup, workspace.Name, name)
    if err != nil {
        return fmt.Errorf("retrieving Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", name, resourceGroup, workspace.Name, err)
    }

    if resp.ID == nil || *resp.ID == "" {
        return fmt.Errorf("empty or nil ID returned for Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q)", name, resourceGroup, workspace.Name)
    }

    d.SetId(*resp.ID)
    return resourceArmOperationalinsightsDataExportRead(d, meta)
}

func resourceArmOperationalinsightsDataExportRead(d *schema.ResourceData, meta interface{}) error {
    client := meta.(*clients.Client).LogAnalytics.DataExportClient
    ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
    defer cancel()

    id, err := parse.LogAnalyticsDataExportID(d.Id())
    if err != nil {
        return err
    }

    resp, err := client.Get(ctx, id.ResourceGroup, id.WorkspaceName, id.Name)
    if err != nil {
        if utils.ResponseWasNotFound(resp.Response) {
            log.Printf("[INFO] Log Analytics Data Export Rule %q does not exist - removing from state", d.Id())
            d.SetId("")
            return nil
        }
        return fmt.Errorf("retrieving Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err)
    }

    d.Set("name", id.Name)
    d.Set("resource_group_name", id.ResourceGroup)
    d.Set("workspace_resource_id", id.WorkspaceID)
    if props := resp.DataExportProperties; props != nil {
        d.Set("export_rule_id", props.DataExportID)
        d.Set("destination_resource_id", flattenArmDataExportDestination(props.Destination))
        d.Set("enabled", props.Enable)
        d.Set("table_names", utils.FlattenStringSlice(props.TableNames))
    }
    return nil
}

func resourceArmOperationalinsightsDataExportDelete(d *schema.ResourceData, meta interface{}) error {
    client := meta.(*clients.Client).LogAnalytics.DataExportClient
    ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
    defer cancel()

    id, err := parse.LogAnalyticsDataExportID(d.Id())
    if err != nil {
        return err
    }

    if _, err := client.Delete(ctx, id.ResourceGroup, id.WorkspaceName, id.Name); err != nil {
        return fmt.Errorf("deleting Log Analytics Data Export Rule %q (Resource Group %q / workspaceName %q): %+v", id.Name, id.ResourceGroup, id.WorkspaceName, err)
    }
    return nil
}

// flattenArmDataExportDestination returns the destination resource ID, or an
// empty string when the destination is unset.
func flattenArmDataExportDestination(input *operationalinsights.Destination) string {
    if input == nil {
        return ""
    }

    var resourceID string
    if input.ResourceID != nil {
        resourceID = *input.ResourceID
    }

    return resourceID
}
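The CRUD functions above obtain a DataExportClient via meta.(*clients.Client).LogAnalytics; the corresponding client wiring is part of this commit but not rendered here. As a rough, hypothetical sketch of what that wiring usually looks like in this provider, assuming the common.ClientOptions helpers and the DataExportsClient from the 2020-03-01-preview operationalinsights SDK package (the real file would also construct the other Log Analytics clients):

package client

import (
    "github.com/Azure/azure-sdk-for-go/services/preview/operationalinsights/mgmt/2020-03-01-preview/operationalinsights"
    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common"
)

// Client is a trimmed-down sketch; the provider's real Log Analytics client
// struct carries additional clients alongside DataExportClient.
type Client struct {
    DataExportClient *operationalinsights.DataExportsClient
}

func NewClient(o *common.ClientOptions) *Client {
    // Construct the SDK client and attach the provider's standard authorizer,
    // user agent and endpoint configuration.
    dataExportClient := operationalinsights.NewDataExportsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId)
    o.ConfigureClient(&dataExportClient.Client, o.ResourceManagerAuthorizer)

    return &Client{
        DataExportClient: &dataExportClient,
    }
}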
azurerm/internal/services/loganalytics/parse/log_analytics_data_export.go (43 additions, 0 deletions)
@@ -0,0 +1,43 @@
package parse

import (
    "fmt"

    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
)

type LogAnalyticsDataExportId struct {
    ResourceGroup string
    WorkspaceName string
    WorkspaceID   string
    Name          string
}

func LogAnalyticsDataExportID(input string) (*LogAnalyticsDataExportId, error) {
    id, err := azure.ParseAzureResourceID(input)
    if err != nil {
        return nil, fmt.Errorf("parsing Log Analytics Data Export Rule ID %q: %+v", input, err)
    }

    logAnalyticsDataExport := LogAnalyticsDataExportId{
        ResourceGroup: id.ResourceGroup,
    }

    if logAnalyticsDataExport.WorkspaceName, err = id.PopSegment("workspaces"); err != nil {
        return nil, err
    }

    // Rebuild the parent workspace ID from the parsed segments; fmt.Sprintf
    // cannot fail, so no error check is needed here.
    logAnalyticsDataExport.WorkspaceID = fmt.Sprintf("/subscriptions/%s/resourcegroups/%s/providers/%s/workspaces/%s", id.SubscriptionID, id.ResourceGroup, id.Provider, logAnalyticsDataExport.WorkspaceName)

    if logAnalyticsDataExport.Name, err = id.PopSegment("dataExports"); err != nil {
        // API issue: the casing of this segment can change, so fall back to the lower-cased form.
        if logAnalyticsDataExport.Name, err = id.PopSegment("dataexports"); err != nil {
            return nil, err
        }
    }
    if err := id.ValidateNoEmptySegments(input); err != nil {
        return nil, err
    }

    return &logAnalyticsDataExport, nil
}
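As a quick illustration of what the parser returns, a small hypothetical caller (not part of the diff), using the same ID format the tests in the following file exercise:

package main

import (
    "fmt"

    "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/loganalytics/parse"
)

func main() {
    // Hypothetical data export rule ID.
    raw := "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataExports/export1"

    id, err := parse.LogAnalyticsDataExportID(raw)
    if err != nil {
        panic(err)
    }

    // Prints: group1 workspace1 export1
    fmt.Println(id.ResourceGroup, id.WorkspaceName, id.Name)
}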
azurerm/internal/services/loganalytics/parse/log_analytics_data_export_test.go (77 additions, 0 deletions)
@@ -0,0 +1,77 @@
package parse

import (
    "testing"
)

func TestLogAnalyticsDataExportID(t *testing.T) {
    testData := []struct {
        Name     string
        Input    string
        Expected *LogAnalyticsDataExportId
    }{
        {
            Name:     "Empty",
            Input:    "",
            Expected: nil,
        },
        {
            Name:     "No Resource Groups Segment",
            Input:    "/subscriptions/00000000-0000-0000-0000-000000000000",
            Expected: nil,
        },
        {
            Name:     "No Resource Groups Value",
            Input:    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/",
            Expected: nil,
        },
        {
            Name:     "Resource Group ID",
            Input:    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/",
            Expected: nil,
        },
        {
            Name:     "Missing DataExport Value",
            Input:    "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataExports",
            Expected: nil,
        },
        {
            Name:  "operationalinsights DataExport ID",
            Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataExports/dataExport1",
            Expected: &LogAnalyticsDataExportId{
                ResourceGroup: "resourceGroup1",
                WorkspaceName: "workspace1",
                Name:          "dataExport1",
            },
        },
        {
            Name:     "Wrong Casing",
            Input:    "/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.OperationalInsights/workspaces/workspace1/DataExports/dataExport1",
            Expected: nil,
        },
    }

    for _, v := range testData {
        t.Logf("[DEBUG] Testing %q..", v.Name)

        actual, err := LogAnalyticsDataExportID(v.Input)
        if err != nil {
            if v.Expected == nil {
                continue
            }
            t.Fatalf("Expected a value but got an error: %s", err)
        }

        if actual.ResourceGroup != v.Expected.ResourceGroup {
            t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup)
        }

        if actual.WorkspaceName != v.Expected.WorkspaceName {
            t.Fatalf("Expected %q but got %q for WorkspaceName", v.Expected.WorkspaceName, actual.WorkspaceName)
        }

        if actual.Name != v.Expected.Name {
            t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name)
        }
    }
}
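The resource schema earlier in the diff validates name with validate.LogAnalyticsDataExportName; the validate file is among this commit's changed files but is not rendered above. Purely as an illustration of the shape such a validator usually takes, here is a hypothetical sketch; the exact naming rules encoded below are assumptions, not taken from the diff:

package validate

import (
    "fmt"
    "regexp"
)

// LogAnalyticsDataExportName is a hypothetical sketch of the schema validator
// referenced by the resource; the assumed rule (4-63 characters, letters,
// digits and hyphens, starting with a letter and ending with a letter or
// digit) may differ from the committed implementation.
func LogAnalyticsDataExportName(i interface{}, k string) (warnings []string, errors []error) {
    v, ok := i.(string)
    if !ok {
        errors = append(errors, fmt.Errorf("expected type of %q to be string", k))
        return
    }

    if !regexp.MustCompile(`^[a-zA-Z][a-zA-Z0-9-]{2,61}[a-zA-Z0-9]$`).MatchString(v) {
        errors = append(errors, fmt.Errorf("%q must be between 4 and 63 characters, may contain letters, digits and hyphens, and must start with a letter and end with a letter or digit, got %q", k, v))
    }

    return warnings, errors
}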