diff --git a/internal/services/datafactory/data_factory_trigger_schedule_data_source.go b/internal/services/datafactory/data_factory_trigger_schedule_data_source.go
new file mode 100644
index 000000000000..26cde107e190
--- /dev/null
+++ b/internal/services/datafactory/data_factory_trigger_schedule_data_source.go
@@ -0,0 +1,318 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package datafactory
+
+import (
+    "context"
+    "fmt"
+    "time"
+
+    "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" // nolint: staticcheck
+    "github.com/hashicorp/go-azure-sdk/resource-manager/datafactory/2018-06-01/factories"
+    "github.com/hashicorp/terraform-provider-azurerm/internal/sdk"
+    "github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/parse"
+    "github.com/hashicorp/terraform-provider-azurerm/internal/services/datafactory/validate"
+    "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
+    "github.com/hashicorp/terraform-provider-azurerm/utils"
+)
+
+type TriggerScheduleDataSource struct{}
+
+type TriggerScheduleDataSourceModel struct {
+    Name          string                    `tfschema:"name"`
+    DataFactoryID string                    `tfschema:"data_factory_id"`
+    Description   string                    `tfschema:"description"`
+    Schedule      []TriggerScheduleSchedule `tfschema:"schedule"`
+    StartTime     string                    `tfschema:"start_time"`
+    EndTime       string                    `tfschema:"end_time"`
+    TimeZone      string                    `tfschema:"time_zone"`
+    Frequency     string                    `tfschema:"frequency"`
+    Interval      int                       `tfschema:"interval"`
+    Activated     bool                      `tfschema:"activated"`
+    PipelineName  string                    `tfschema:"pipeline_name"`
+    Annotations   []string                  `tfschema:"annotations"`
+}
+
+type TriggerScheduleSchedule struct {
+    DaysOfMonth []int                            `tfschema:"days_of_month"`
+    DaysOfWeek  []string                         `tfschema:"days_of_week"`
+    Hours       []int                            `tfschema:"hours"`
+    Minutes     []int                            `tfschema:"minutes"`
+    Monthly     []TriggerScheduleScheduleMonthly `tfschema:"monthly"`
+}
+
+type TriggerScheduleScheduleMonthly struct {
+    Weekday string `tfschema:"weekday"`
+    Week    int    `tfschema:"week"`
+}
+
+var _ sdk.DataSource = TriggerScheduleDataSource{}
+
+func (d TriggerScheduleDataSource) ModelObject() interface{} {
+    return &TriggerScheduleDataSourceModel{}
+}
+
+func (d TriggerScheduleDataSource) ResourceType() string {
+    return "azurerm_data_factory_trigger_schedule"
+}
+
+func (d TriggerScheduleDataSource) Arguments() map[string]*pluginsdk.Schema {
+    return map[string]*pluginsdk.Schema{
+        "name": {
+            Type:         pluginsdk.TypeString,
+            Required:     true,
+            ValidateFunc: validate.DataFactoryPipelineAndTriggerName(),
+        },
+
+        "data_factory_id": {
+            Type:         pluginsdk.TypeString,
+            Required:     true,
+            ValidateFunc: factories.ValidateFactoryID,
+        },
+    }
+}
+
+func (d TriggerScheduleDataSource) Attributes() map[string]*pluginsdk.Schema {
+    return map[string]*pluginsdk.Schema{
+        "description": {
+            Type:     pluginsdk.TypeString,
+            Computed: true,
+        },
+
+        "schedule": {
+            Type:     pluginsdk.TypeList,
+            Computed: true,
+            Elem: &pluginsdk.Resource{
+                Schema: map[string]*pluginsdk.Schema{
+                    "days_of_month": {
+                        Type:     pluginsdk.TypeList,
+                        Computed: true,
+                        Elem: &pluginsdk.Schema{
+                            Type: pluginsdk.TypeInt,
+                        },
+                    },
+
+                    "days_of_week": {
+                        Type:     pluginsdk.TypeList,
+                        Computed: true,
+                        Elem: &pluginsdk.Schema{
+                            Type: pluginsdk.TypeString,
+                        },
+                    },
+
+                    "hours": {
+                        Type:     pluginsdk.TypeList,
+                        Computed: true,
+                        Elem: &pluginsdk.Schema{
+                            Type: pluginsdk.TypeInt,
+                        },
+                    },
+
+                    "minutes": {
+                        Type:     pluginsdk.TypeList,
+                        Computed: true,
+                        Elem: &pluginsdk.Schema{
+                            Type: pluginsdk.TypeInt,
+                        },
+                    },
+
+                    "monthly": {
+                        Type:     pluginsdk.TypeList,
+                        Computed: true,
+                        Elem: &pluginsdk.Resource{
+                            Schema: map[string]*pluginsdk.Schema{
+                                "weekday": {
+                                    Type:     pluginsdk.TypeString,
+                                    Computed: true,
+                                },
+
+                                "week": {
+                                    Type:     pluginsdk.TypeInt,
+                                    Computed: true,
+                                },
+                            },
+                        },
+                    },
+                },
+            },
+        },
+
+        "start_time": {
+            Type:     pluginsdk.TypeString,
+            Computed: true,
+        },
+
+        "end_time": {
+            Type:     pluginsdk.TypeString,
+            Computed: true,
+        },
+
+        "time_zone": {
+            Type:     pluginsdk.TypeString,
+            Computed: true,
+        },
+
+        "frequency": {
+            Type:     pluginsdk.TypeString,
+            Computed: true,
+        },
+
+        "interval": {
+            Type:     pluginsdk.TypeInt,
+            Computed: true,
+        },
+
+        "activated": {
+            Type:     pluginsdk.TypeBool,
+            Computed: true,
+        },
+
+        "pipeline_name": {
+            Type:     pluginsdk.TypeString,
+            Computed: true,
+        },
+
+        "annotations": {
+            Type:     pluginsdk.TypeList,
+            Computed: true,
+            Elem: &pluginsdk.Schema{
+                Type: pluginsdk.TypeString,
+            },
+        },
+    }
+}
+
+func (d TriggerScheduleDataSource) Read() sdk.ResourceFunc {
+    return sdk.ResourceFunc{
+        Timeout: 5 * time.Minute,
+        Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
+            var model TriggerScheduleDataSourceModel
+            if err := metadata.Decode(&model); err != nil {
+                return err
+            }
+
+            subscriptionId := metadata.Client.Account.SubscriptionId
+            client := metadata.Client.DataFactory.TriggersClient
+
+            dataFactoryId, err := factories.ParseFactoryID(model.DataFactoryID)
+            if err != nil {
+                return err
+            }
+
+            id := parse.NewTriggerID(subscriptionId, dataFactoryId.ResourceGroupName, dataFactoryId.FactoryName, model.Name)
+
+            existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "")
+            if err != nil {
+                if utils.ResponseWasNotFound(existing.Response) {
+                    return fmt.Errorf("%s was not found", id)
+                }
+                return fmt.Errorf("retrieving %s: %+v", id, err)
+            }
+
+            metadata.SetID(id)
+
+            model.Name = *existing.Name
+            model.DataFactoryID = dataFactoryId.ID()
+
+            scheduleTriggerProps, ok := existing.Properties.AsScheduleTrigger()
+            if !ok {
+                return fmt.Errorf("classifying %s: Expected: %q Received: %q", id.ID(), datafactory.TypeBasicTriggerTypeScheduleTrigger, *existing.Type)
+            }
+
+            if scheduleTriggerProps != nil {
+                model.Activated = scheduleTriggerProps.RuntimeState == datafactory.TriggerRuntimeStateStarted
+
+                if recurrence := scheduleTriggerProps.Recurrence; recurrence != nil {
+                    if v := recurrence.StartTime; v != nil {
+                        model.StartTime = v.Format(time.RFC3339)
+                    }
+                    if v := recurrence.EndTime; v != nil {
+                        model.EndTime = v.Format(time.RFC3339)
+                    }
+                    model.Frequency = string(recurrence.Frequency)
+                    model.Interval = int(*recurrence.Interval)
+                    model.TimeZone = *recurrence.TimeZone
+
+                    if schedule := recurrence.Schedule; schedule != nil {
+                        model.Schedule = flattenDataFactoryScheduleModel(schedule)
+                    }
+                }
+
+                if pipelines := scheduleTriggerProps.Pipelines; pipelines != nil {
+                    if len(*pipelines) > 0 {
+                        pipeline := *pipelines
+                        if reference := pipeline[0].PipelineReference; reference != nil {
+                            model.PipelineName = *reference.ReferenceName
+                        }
+                    }
+                }
+
+                model.Annotations = flattenDataFactoryAnnotations(scheduleTriggerProps.Annotations)
+
+                if scheduleTriggerProps.Description != nil {
+                    model.Description = *scheduleTriggerProps.Description
+                }
+            }
+
+            if err := metadata.Encode(&model); err != nil {
+                return fmt.Errorf("encoding: %+v", err)
+            }
+
+            return nil
+        },
+    }
+}
+
+func flattenDataFactoryScheduleModel(schedule *datafactory.RecurrenceSchedule) []TriggerScheduleSchedule {
+    if schedule == nil {
+        return []TriggerScheduleSchedule{}
+    }
+
+    result := TriggerScheduleSchedule{}
+    results := []TriggerScheduleSchedule{}
+
+    if schedule.Hours != nil {
+        for _, v := range *schedule.Hours {
+            result.Hours = append(result.Hours, int(v))
+        }
+    }
+
+    if schedule.Minutes != nil {
+        for _, v := range *schedule.Minutes {
+            result.Minutes = append(result.Minutes, int(v))
+        }
+    }
+
+    if schedule.MonthDays != nil {
+        for _, v := range *schedule.MonthDays {
+            result.DaysOfMonth = append(result.DaysOfMonth, int(v))
+        }
+    }
+
+    if schedule.WeekDays != nil {
+        weekDays := make([]string, 0)
+        for _, v := range *schedule.WeekDays {
+            weekDays = append(weekDays, string(v))
+        }
+        result.DaysOfWeek = weekDays
+    }
+
+    if schedule.MonthlyOccurrences != nil {
+        var monthlyOccurrences []TriggerScheduleScheduleMonthly
+        for _, v := range *schedule.MonthlyOccurrences {
+            occurrence := TriggerScheduleScheduleMonthly{}
+            occurrence.Weekday = string(v.Day)
+            if v.Occurrence != nil {
+                occurrence.Week = int(*v.Occurrence)
+            }
+            monthlyOccurrences = append(monthlyOccurrences, occurrence)
+        }
+        result.Monthly = monthlyOccurrences
+    }
+
+    results = append(results, result)
+    return results
+}
diff --git a/internal/services/datafactory/data_factory_trigger_schedule_data_source_test.go b/internal/services/datafactory/data_factory_trigger_schedule_data_source_test.go
new file mode 100644
index 000000000000..485527917ce8
--- /dev/null
+++ b/internal/services/datafactory/data_factory_trigger_schedule_data_source_test.go
@@ -0,0 +1,39 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package datafactory_test
+
+import (
+    "fmt"
+    "testing"
+
+    "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance"
+    "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check"
+)
+
+type DataFactoryTriggerScheduleDataSource struct{}
+
+func TestAccDataFactoryTriggerScheduleDataSource_basic(t *testing.T) {
+    data := acceptance.BuildTestData(t, "data.azurerm_data_factory_trigger_schedule", "test")
+    r := DataFactoryTriggerScheduleDataSource{}
+
+    data.DataSourceTest(t, []acceptance.TestStep{
+        {
+            Config: r.basic(data),
+            Check: acceptance.ComposeTestCheckFunc(
+                check.That(data.ResourceName).Key("name").Exists(),
+            ),
+        },
+    })
+}
+
+func (DataFactoryTriggerScheduleDataSource) basic(data acceptance.TestData) string {
+    return fmt.Sprintf(`
+%s
+
+data "azurerm_data_factory_trigger_schedule" "test" {
+  name            = azurerm_data_factory_trigger_schedule.test.name
+  data_factory_id = azurerm_data_factory.test.id
+}
+`, TriggerScheduleResource{}.basic(data))
+}
diff --git a/internal/services/datafactory/data_factory_trigger_schedules_data_source.go b/internal/services/datafactory/data_factory_trigger_schedules_data_source.go
new file mode 100644
index 000000000000..7623b2d1cf8d
--- /dev/null
+++ b/internal/services/datafactory/data_factory_trigger_schedules_data_source.go
@@ -0,0 +1,96 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package datafactory
+
+import (
+    "context"
+    "fmt"
+    "time"
+
+    "github.com/Azure/go-autorest/autorest"
+    "github.com/hashicorp/go-azure-sdk/resource-manager/datafactory/2018-06-01/factories"
+    "github.com/hashicorp/terraform-provider-azurerm/internal/sdk"
+    "github.com/hashicorp/terraform-provider-azurerm/internal/tf/pluginsdk"
+    "github.com/hashicorp/terraform-provider-azurerm/utils"
+)
+
+type TriggerSchedulesDataSource struct{}
+
+type TriggerSchedulesDataSourceModel struct {
+    DataFactoryID string   `tfschema:"data_factory_id"`
+    Items         []string `tfschema:"items"`
+}
+
+func (d TriggerSchedulesDataSource) Arguments() map[string]*pluginsdk.Schema {
+    return map[string]*pluginsdk.Schema{
+        "data_factory_id": {
+            Type:         pluginsdk.TypeString,
+            Required:     true,
+            ValidateFunc: factories.ValidateFactoryID,
+        },
+    }
+}
+
+func (d TriggerSchedulesDataSource) Attributes() map[string]*pluginsdk.Schema {
+    return map[string]*pluginsdk.Schema{
+        "items": {
+            Type:     pluginsdk.TypeList,
+            Computed: true,
+            Elem: &pluginsdk.Schema{
+                Type: pluginsdk.TypeString,
+            },
+        },
+    }
+}
+
+func (d TriggerSchedulesDataSource) ModelObject() interface{} {
+    return &TriggerSchedulesDataSourceModel{}
+}
+
+func (d TriggerSchedulesDataSource) ResourceType() string {
+    return "azurerm_data_factory_trigger_schedules"
+}
+
+func (d TriggerSchedulesDataSource) Read() sdk.ResourceFunc {
+    return sdk.ResourceFunc{
+        Timeout: 5 * time.Minute,
+        Func: func(ctx context.Context, metadata sdk.ResourceMetaData) error {
+            var model TriggerSchedulesDataSourceModel
+            if err := metadata.Decode(&model); err != nil {
+                return err
+            }
+
+            client := metadata.Client.DataFactory.TriggersClient
+
+            dataFactoryId, err := factories.ParseFactoryID(model.DataFactoryID)
+            if err != nil {
+                return err
+            }
+
+            iter, err := client.ListByFactoryComplete(ctx, dataFactoryId.ResourceGroupName, dataFactoryId.FactoryName)
+            if err != nil {
+                if v, ok := err.(autorest.DetailedError); ok && utils.ResponseWasNotFound(autorest.Response{Response: v.Response}) {
+                    return fmt.Errorf("%s was not found", dataFactoryId.ID())
+                }
+                return fmt.Errorf("listing triggers for %s: %+v", dataFactoryId.ID(), err)
+            }
+
+            // ListByFactory returns every trigger defined in the factory; the names are
+            // collected here without filtering on trigger type.
+            triggers := []string{}
+            for iter.NotDone() {
+                trigger := iter.Value()
+                if trigger.Name != nil {
+                    triggers = append(triggers, *trigger.Name)
+                }
+                if err := iter.NextWithContext(ctx); err != nil {
+                    return fmt.Errorf("listing triggers for %s: %+v", dataFactoryId.ID(), err)
+                }
+            }
+
+            metadata.SetID(dataFactoryId)
+            model.Items = triggers
+
+            return metadata.Encode(&model)
+        },
+    }
+}
diff --git a/internal/services/datafactory/data_factory_trigger_schedules_data_source_test.go b/internal/services/datafactory/data_factory_trigger_schedules_data_source_test.go
new file mode 100644
index 000000000000..afb65185e550
--- /dev/null
+++ b/internal/services/datafactory/data_factory_trigger_schedules_data_source_test.go
@@ -0,0 +1,38 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package datafactory_test
+
+import (
+    "fmt"
+    "testing"
+
+    "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance"
+    "github.com/hashicorp/terraform-provider-azurerm/internal/acceptance/check"
+)
+
+type DataFactoryTriggerSchedulesDataSource struct{}
+
+func TestAccDataFactoryTriggerSchedulesDataSource_basic(t *testing.T) {
+    data := acceptance.BuildTestData(t, "data.azurerm_data_factory_trigger_schedules", "test")
+    r := DataFactoryTriggerSchedulesDataSource{}
+
+    data.DataSourceTest(t, []acceptance.TestStep{
+        {
+            Config: r.basic(data),
+            Check: acceptance.ComposeTestCheckFunc(
+                check.That(data.ResourceName).Key("items.#").IsNotEmpty(),
+            ),
+        },
+    })
+}
+
+func (DataFactoryTriggerSchedulesDataSource) basic(data acceptance.TestData) string {
+    return fmt.Sprintf(`
+%s
+
+data "azurerm_data_factory_trigger_schedules" "test" {
+  data_factory_id = azurerm_data_factory.test.id
+}
+`, TriggerScheduleResource{}.basic(data))
+}
diff --git a/internal/services/datafactory/registration.go b/internal/services/datafactory/registration.go
index 794ccb721fed..6871909efb4d 100644
--- a/internal/services/datafactory/registration.go
+++ b/internal/services/datafactory/registration.go
@@ -30,7 +30,10 @@ func (r Registration) WebsiteCategories() []string {
 }
 
 func (Registration) DataSources() []sdk.DataSource {
-    return []sdk.DataSource{}
+    return []sdk.DataSource{
+        TriggerScheduleDataSource{},
+        TriggerSchedulesDataSource{},
+    }
 }
 
 func (Registration) Resources() []sdk.Resource {
diff --git a/website/docs/d/data_factory_trigger_schedule.html.markdown b/website/docs/d/data_factory_trigger_schedule.html.markdown
new file mode 100644
index 000000000000..8864a982a2af
--- /dev/null
+++ b/website/docs/d/data_factory_trigger_schedule.html.markdown
@@ -0,0 +1,86 @@
+---
+subcategory: "Data Factory"
+layout: "azurerm"
+page_title: "Azure Resource Manager: Data Source: azurerm_data_factory_trigger_schedule"
+description: |-
+  Gets information about a trigger schedule in Azure Data Factory (Version 2).
+---
+
+# Data Source: azurerm_data_factory_trigger_schedule
+
+Use this data source to access information about a trigger schedule in Azure Data Factory (Version 2).
+
+## Example Usage
+
+```hcl
+data "azurerm_data_factory_trigger_schedule" "example" {
+  name            = "example_trigger"
+  data_factory_id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1"
+}
+
+output "id" {
+  value = data.azurerm_data_factory_trigger_schedule.example.id
+}
+```
+
+## Arguments Reference
+
+The following arguments are supported:
+
+* `name` - (Required) The name of the trigger schedule.
+
+* `data_factory_id` - (Required) The ID of the Azure Data Factory to fetch the trigger schedule from.
+
+## Attributes Reference
+
+In addition to the Arguments listed above - the following Attributes are exported:
+
+* `id` - The ID of the Azure Data Factory trigger schedule.
+
+* `description` - The Schedule Trigger's description.
+
+* `schedule` - A `schedule` block as described below, which further specifies the recurrence schedule for the trigger.
+
+* `start_time` - The time the Schedule Trigger will start. The time will be represented in UTC.
+
+* `time_zone` - The timezone of the start/end time.
+
+* `end_time` - The time the Schedule Trigger should end. The time will be represented in UTC.
+
+* `interval` - The interval for how often the trigger occurs.
+
+* `frequency` - The trigger frequency.
+
+* `activated` - Specifies if the Data Factory Schedule Trigger is activated.
+
+* `pipeline_name` - The Data Factory Pipeline name that the trigger will act on.
+
+* `annotations` - List of tags that can be used for describing the Data Factory Schedule Trigger.
+
+---
+
+A `schedule` block exports the following:
+
+* `days_of_month` - Day(s) of the month on which the trigger is scheduled.
+
+* `days_of_week` - Days of the week on which the trigger is scheduled.
+
+* `hours` - Hours of the day on which the trigger is scheduled.
+
+* `minutes` - Minutes of the hour on which the trigger is scheduled.
+
+* `monthly` - A `monthly` block as documented below, which specifies the days of the month on which the trigger is scheduled.
+
+---
+
+A `monthly` block exports the following:
+
+* `weekday` - The day of the week on which the trigger runs.
+
+* `week` - The occurrence of the specified day during the month.
+
+## Timeouts
+
+The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/language/resources/syntax#operation-timeouts) for certain actions:
+
+* `read` - (Defaults to 5 minutes) Used when retrieving the Azure Data Factory trigger schedule.
diff --git a/website/docs/d/data_factory_trigger_schedules.html.markdown b/website/docs/d/data_factory_trigger_schedules.html.markdown
new file mode 100644
index 000000000000..3227a9ff6d19
--- /dev/null
+++ b/website/docs/d/data_factory_trigger_schedules.html.markdown
@@ -0,0 +1,43 @@
+---
+subcategory: "Data Factory"
+layout: "azurerm"
+page_title: "Azure Resource Manager: Data Source: azurerm_data_factory_trigger_schedules"
+description: |-
+  Gets information about all existing trigger schedules in Azure Data Factory (Version 2).
+---
+
+# Data Source: azurerm_data_factory_trigger_schedules
+
+Use this data source to access information about all existing trigger schedules in Azure Data Factory (Version 2).
+
+## Example Usage
+
+```hcl
+data "azurerm_data_factory_trigger_schedules" "example" {
+  data_factory_id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1"
+}
+
+output "items" {
+  value = data.azurerm_data_factory_trigger_schedules.example.items
+}
+```
+
+## Arguments Reference
+
+The following arguments are supported:
+
+- `data_factory_id` - (Required) The ID of the Azure Data Factory to fetch trigger schedules from.
+
+## Attributes Reference
+
+In addition to the Arguments listed above - the following Attributes are exported:
+
+- `id` - The ID of the Azure Data Factory.
+
+- `items` - A list of trigger schedule names available in this Azure Data Factory.
+
+## Timeouts
+
+The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/language/resources/syntax#operation-timeouts) for certain actions:
+
+- `read` - (Defaults to 5 minutes) Used when retrieving the Azure Data Factory trigger schedules.
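
As a usage sketch, the two new data sources compose naturally: the plural data source enumerates trigger names, and the singular one resolves each name to its full schedule. The configuration below is illustrative only; it assumes an `azurerm_data_factory` resource named `example` already exists, and the resource names are placeholders rather than anything defined in this change.

```hcl
# Enumerate every trigger name registered in the factory.
data "azurerm_data_factory_trigger_schedules" "example" {
  data_factory_id = azurerm_data_factory.example.id
}

# Look up the full definition of each trigger by name.
data "azurerm_data_factory_trigger_schedule" "example" {
  for_each = toset(data.azurerm_data_factory_trigger_schedules.example.items)

  name            = each.value
  data_factory_id = azurerm_data_factory.example.id
}

output "trigger_frequencies" {
  # Maps each trigger name to its frequency, e.g. { "example_trigger" = "Minute" }
  value = { for name, trigger in data.azurerm_data_factory_trigger_schedule.example : name => trigger.frequency }
}
```

Note that `items` contains every trigger in the factory, not only schedule triggers, while the singular data source rejects anything other than a schedule trigger; this pattern therefore assumes the factory defines schedule triggers only.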