diff --git a/internal/services/streamanalytics/registration.go b/internal/services/streamanalytics/registration.go
index af3ca79fbc42..e56e72cc51d5 100644
--- a/internal/services/streamanalytics/registration.go
+++ b/internal/services/streamanalytics/registration.go
@@ -27,6 +27,7 @@ func (r Registration) Resources() []sdk.Resource {
 		ManagedPrivateEndpointResource{},
 		OutputFunctionResource{},
 		OutputTableResource{},
+		OutputPowerBIResource{},
 		OutputCosmosDBResource{},
 	}
 }
diff --git a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go
new file mode 100644
index 000000000000..de8e96001168
--- /dev/null
+++ b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource.go
@@ -0,0 +1,304 @@
+package streamanalytics
+
+import (
+	"context"
+	"fmt"
+	"time"
+
+	"github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2020-03-01/streamanalytics"
+	"github.com/hashicorp/go-azure-helpers/lang/response"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	"github.com/hashicorp/terraform-provider-azurerm/internal/sdk"
+	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse"
+	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/validate"
+	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
+	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/validation"
+	"github.com/hashicorp/terraform-provider-azurerm/utils"
+)
+
+type OutputPowerBIResource struct{}
+
+var _ sdk.ResourceWithCustomImporter = OutputPowerBIResource{}
+
+type OutputPowerBIResourceModel struct {
+	Name               string `tfschema:"name"`
+	StreamAnalyticsJob string `tfschema:"stream_analytics_job_id"`
+	DataSet            string `tfschema:"dataset"`
+	Table              string `tfschema:"table"`
+	GroupID            string `tfschema:"group_id"`
+	GroupName          string `tfschema:"group_name"`
+}
+
+func (r OutputPowerBIResource) Arguments() map[string]*pluginsdk.Schema {
+	return map[string]*pluginsdk.Schema{
+		"name": {
+			Type:         pluginsdk.TypeString,
+			Required:     true,
+			ForceNew:     true,
+			ValidateFunc: validation.StringIsNotEmpty,
+		},
+
+		"stream_analytics_job_id": {
+			Type:         pluginsdk.TypeString,
+			Required:     true,
+			ForceNew:     true,
+			ValidateFunc: validate.StreamingJobID,
+		},
+
+		"dataset": {
+			Type:         pluginsdk.TypeString,
+			Required:     true,
+			ValidateFunc: validation.StringIsNotEmpty,
+		},
+
+		"table": {
+			Type:         pluginsdk.TypeString,
+			Required:     true,
+			ValidateFunc: validation.StringIsNotEmpty,
+		},
+
+		"group_id": {
+			Type:         pluginsdk.TypeString,
+			Required:     true,
+			ValidateFunc: validation.IsUUID,
+		},
+
+		"group_name": {
+			Type:         pluginsdk.TypeString,
+			Required:     true,
+			ValidateFunc: validation.StringIsNotEmpty,
+		},
+	}
+}
+
+func (r OutputPowerBIResource) Attributes() map[string]*schema.Schema {
+	return map[string]*pluginsdk.Schema{}
+}
+
+func (r OutputPowerBIResource) ModelObject() interface{} {
+	return &OutputPowerBIResourceModel{}
+}
+
+func (r OutputPowerBIResource) ResourceType() string {
+	return "azurerm_stream_analytics_output_powerbi"
+}
+
+func (r OutputPowerBIResource) Create() sdk.ResourceFunc {
+	return sdk.ResourceFunc{
+		Timeout: 30 * time.Minute,
+		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
+			var model OutputPowerBIResourceModel
+			if err := metadata.Decode(&model); err != nil {
+				return err
+			}
+
+			client := metadata.Client.StreamAnalytics.OutputsClient
+			subscriptionId := metadata.Client.Account.SubscriptionId
+
+			streamingJobStruct, err := parse.StreamingJobID(model.StreamAnalyticsJob)
+			if err != nil {
+				return err
+			}
+			id := parse.NewOutputID(subscriptionId, streamingJobStruct.ResourceGroup, streamingJobStruct.Name, model.Name)
+
+			existing, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name)
+			if err != nil && !utils.ResponseWasNotFound(existing.Response) {
+				return fmt.Errorf("checking for presence of existing %s: %+v", id, err)
+			}
+
+			if !utils.ResponseWasNotFound(existing.Response) {
+				return metadata.ResourceRequiresImport(r.ResourceType(), id)
+			}
+
+			powerbiOutputProps := &streamanalytics.PowerBIOutputDataSourceProperties{
+				Dataset:            utils.String(model.DataSet),
+				Table:              utils.String(model.Table),
+				GroupID:            utils.String(model.GroupID),
+				GroupName:          utils.String(model.GroupName),
+				AuthenticationMode: streamanalytics.AuthenticationMode("Msi"), // The authentication mode is fixed to "Msi" here, since the other modes require parameters that can only be obtained from the portal.
+			}
+
+			props := streamanalytics.Output{
+				Name: utils.String(model.Name),
+				OutputProperties: &streamanalytics.OutputProperties{
+					Datasource: &streamanalytics.PowerBIOutputDataSource{
+						Type:                              streamanalytics.TypeBasicOutputDataSourceTypePowerBI,
+						PowerBIOutputDataSourceProperties: powerbiOutputProps,
+					},
+				},
+			}
+
+			if _, err = client.CreateOrReplace(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, "", ""); err != nil {
+				return fmt.Errorf("creating %s: %+v", id, err)
+			}
+
+			metadata.SetID(id)
+
+			return nil
+		},
+	}
+}
+
+func (r OutputPowerBIResource) Update() sdk.ResourceFunc {
+	return sdk.ResourceFunc{
+		Timeout: 30 * time.Minute,
+		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
+			client := metadata.Client.StreamAnalytics.OutputsClient
+			id, err := parse.OutputID(metadata.ResourceData.Id())
+			if err != nil {
+				return err
+			}
+
+			var state OutputPowerBIResourceModel
+			if err := metadata.Decode(&state); err != nil {
+				return fmt.Errorf("decoding: %+v", err)
+			}
+
+			needUpdateDataSourceProps := false
+			dataSourceProps := streamanalytics.PowerBIOutputDataSourceProperties{}
+			d := metadata.ResourceData
+
+			if d.HasChange("dataset") {
+				needUpdateDataSourceProps = true
+				dataSourceProps.Dataset = &state.DataSet
+			}
+
+			if d.HasChange("table") {
+				needUpdateDataSourceProps = true
+				dataSourceProps.Table = &state.Table
+			}
+
+			if d.HasChange("group_name") {
+				needUpdateDataSourceProps = true
+				dataSourceProps.GroupName = &state.GroupName
+			}
+
+			if d.HasChange("group_id") {
+				needUpdateDataSourceProps = true
+				dataSourceProps.GroupID = &state.GroupID
+			}
+
+			if !needUpdateDataSourceProps {
+				return nil
+			}
+
+			updateDataSource := streamanalytics.PowerBIOutputDataSource{
+				Type:                              streamanalytics.TypeBasicOutputDataSourceTypePowerBI,
+				PowerBIOutputDataSourceProperties: &dataSourceProps,
+			}
+
+			props := streamanalytics.Output{
+				OutputProperties: &streamanalytics.OutputProperties{
+					Datasource: updateDataSource,
+				},
+			}
+
+			if _, err = client.Update(ctx, props, id.ResourceGroup, id.StreamingjobName, id.Name, ""); err != nil {
+				return fmt.Errorf("updating %s: %+v", *id, err)
+			}
+
+			return nil
+		},
+	}
+}
+
+func (r OutputPowerBIResource) Read() sdk.ResourceFunc {
+	return sdk.ResourceFunc{
+		Timeout: 5 * time.Minute,
+		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
+			client := metadata.Client.StreamAnalytics.OutputsClient
+			id, err := parse.OutputID(metadata.ResourceData.Id())
+			if err != nil {
+				return err
+			}
+
+			resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name)
+			if err != nil {
+				if utils.ResponseWasNotFound(resp.Response) {
+					return metadata.MarkAsGone(id)
+				}
+				return fmt.Errorf("reading %s: %+v", *id, err)
+			}
+
+			if props := resp.OutputProperties; props != nil && props.Datasource != nil {
+				v, ok := props.Datasource.AsPowerBIOutputDataSource()
+				if !ok {
+					return fmt.Errorf("converting the data source of %s to a PowerBI output", *id)
+				}
+
+				streamingJobId := parse.NewStreamingJobID(id.SubscriptionId, id.ResourceGroup, id.StreamingjobName)
+
+				state := OutputPowerBIResourceModel{
+					Name:               id.Name,
+					StreamAnalyticsJob: streamingJobId.ID(),
+				}
+
+				if v.Dataset != nil {
+					state.DataSet = *v.Dataset
+				}
+
+				if v.Table != nil {
+					state.Table = *v.Table
+				}
+
+				if v.GroupID != nil {
+					state.GroupID = *v.GroupID
+				}
+
+				if v.GroupName != nil {
+					state.GroupName = *v.GroupName
+				}
+
+				return metadata.Encode(&state)
+			}
+			return nil
+		},
+	}
+}
+
+func (r OutputPowerBIResource) Delete() sdk.ResourceFunc {
+	return sdk.ResourceFunc{
+		Timeout: 30 * time.Minute,
+		Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
+			client := metadata.Client.StreamAnalytics.OutputsClient
+			id, err := parse.OutputID(metadata.ResourceData.Id())
+			if err != nil {
+				return err
+			}
+
+			metadata.Logger.Infof("deleting %s", *id)
+
+			if resp, err := client.Delete(ctx, id.ResourceGroup, id.StreamingjobName, id.Name); err != nil {
+				if !response.WasNotFound(resp.Response) {
+					return fmt.Errorf("deleting %s: %+v", *id, err)
+				}
+			}
+			return nil
+		},
+	}
+}
+
+func (r OutputPowerBIResource) IDValidationFunc() pluginsdk.SchemaValidateFunc {
+	return validate.OutputID
+}
+
+func (r OutputPowerBIResource) CustomImporter() sdk.ResourceRunFunc {
+	return func(ctx context.Context, metadata sdk.ResourceMetaData) error {
+		id, err := parse.OutputID(metadata.ResourceData.Id())
+		if err != nil {
+			return err
+		}
+
+		client := metadata.Client.StreamAnalytics.OutputsClient
+		resp, err := client.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name)
+		if err != nil || resp.OutputProperties == nil {
+			return fmt.Errorf("reading %s: %+v", *id, err)
+		}
+
+		props := resp.OutputProperties
+		if _, ok := props.Datasource.AsPowerBIOutputDataSource(); !ok {
+			return fmt.Errorf("specified output is not of type %s", streamanalytics.TypeBasicOutputDataSourceTypePowerBI)
+		}
+		return nil
+	}
+}
diff --git a/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go
new file mode 100644
index 000000000000..a318506334aa
--- /dev/null
+++ b/internal/services/streamanalytics/stream_analytics_output_powerbi_resource_test.go
@@ -0,0 +1,133 @@
+package streamanalytics_test
+
+import (
+	"context"
+	"fmt"
+	"testing"
+
+	"github.com/hashicorp/terraform-provider-azurerm/internal/acceptance"
+	"github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check"
+	"github.com/hashicorp/terraform-provider-azurerm/internal/clients"
+	"github.com/hashicorp/terraform-provider-azurerm/internal/services/streamanalytics/parse"
+	"github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
+	"github.com/hashicorp/terraform-provider-azurerm/utils"
+)
+
+type StreamAnalyticsOutputPowerBIResource struct{}
+
+func (r StreamAnalyticsOutputPowerBIResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
+	id, err := parse.OutputID(state.ID)
+	if err != nil {
+		return nil, err
+	}
+
+	resp, err := client.StreamAnalytics.OutputsClient.Get(ctx, id.ResourceGroup, id.StreamingjobName, id.Name)
+	if err != nil {
+		if utils.ResponseWasNotFound(resp.Response) {
+			return utils.Bool(false), nil
+		}
+		return nil, fmt.Errorf("retrieving %s: %+v", *id, err)
+	}
+	return utils.Bool(true), nil
+}
+
+func TestAccStreamAnalyticsOutputPowerBI_basic(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_stream_analytics_output_powerbi", "test")
+	r := StreamAnalyticsOutputPowerBIResource{}
+
+	data.ResourceTest(t, r, []acceptance.TestStep{
+		{
+			Config: r.basic(data),
+			Check: acceptance.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func TestAccStreamAnalyticsOutputPowerBI_update(t *testing.T) {
+	data := acceptance.BuildTestData(t, "azurerm_stream_analytics_output_powerbi", "test")
+	r := StreamAnalyticsOutputPowerBIResource{}
+
+	data.ResourceTest(t, r, []acceptance.TestStep{
+		{
+			Config: r.basic(data),
+			Check: acceptance.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		{
+			Config: r.updated(data),
+			Check: acceptance.ComposeTestCheckFunc(
+				check.That(data.ResourceName).ExistsInAzure(r),
+			),
+		},
+		data.ImportStep(),
+	})
+}
+
+func (r StreamAnalyticsOutputPowerBIResource) basic(data acceptance.TestData) string {
+	template := r.template(data)
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_stream_analytics_output_powerbi" "test" {
+  name                    = "acctestoutput-%d"
+  stream_analytics_job_id = azurerm_stream_analytics_job.test.id
+  dataset                 = "foo"
+  table                   = "bar"
+  group_id                = "85b3dbca-5974-4067-9669-67a141095a76"
+  group_name              = "some-test-group-name"
+}
+`, template, data.RandomInteger)
+}
+
+func (r StreamAnalyticsOutputPowerBIResource) updated(data acceptance.TestData) string {
+	template := r.template(data)
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_stream_analytics_output_powerbi" "test" {
+  name                    = "acctestoutput-%d"
+  stream_analytics_job_id = azurerm_stream_analytics_job.test.id
+  dataset                 = "updated-dataset"
+  table                   = "updated-table"
+  group_id                = "e18ff5df-fb66-4f6d-8f27-88c4dcbfc002"
+  group_name              = "some-updated-group-name"
+}
+`, template, data.RandomInteger)
+}
+
+func (r StreamAnalyticsOutputPowerBIResource) template(data acceptance.TestData) string {
+	return fmt.Sprintf(`
+provider "azurerm" {
+  features {}
+}
+
+resource "azurerm_resource_group" "test" {
+  name     = "acctestRG-%[1]d"
+  location = "%[2]s"
+}
+
+resource "azurerm_stream_analytics_job" "test" {
+  name                                     = "acctestjob-%[1]d"
+  resource_group_name                      = azurerm_resource_group.test.name
+  location                                 = azurerm_resource_group.test.location
+  compatibility_level                      = "1.0"
+  data_locale                              = "en-GB"
+  events_late_arrival_max_delay_in_seconds = 60
+  events_out_of_order_max_delay_in_seconds = 50
+  events_out_of_order_policy               = "Adjust"
+  output_error_policy                      = "Drop"
+  streaming_units                          = 3
+
+  transformation_query = <<QUERY
+    SELECT *
+    INTO [YourOutputAlias]
+    FROM [YourInputAlias]
+QUERY
+}
+`, data.RandomInteger, data.Locations.Primary)
+}
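
For reference, a minimal configuration using the new resource might look like the sketch below. It simply mirrors the schema and the acceptance-test configs above; the job reference, dataset and table names, and the Power BI workspace (group) values are placeholders, not values taken from this change. Because the resource pins the authentication mode to "Msi" in Create(), the Stream Analytics job's managed identity would presumably also need access to the target Power BI workspace for the output to deliver data.

resource "azurerm_stream_analytics_output_powerbi" "example" {
  # Placeholder values: substitute your own job reference and Power BI workspace details.
  name                    = "output-to-powerbi"
  stream_analytics_job_id = azurerm_stream_analytics_job.example.id
  dataset                 = "example-dataset"
  table                   = "example-table"
  group_id                = "00000000-0000-0000-0000-000000000000"
  group_name              = "example-workspace"
}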