diff --git a/azurerm/helpers/validate/datafactory.go b/azurerm/helpers/validate/datafactory.go index 2a66b38f491a..3ea7a2624926 100644 --- a/azurerm/helpers/validate/datafactory.go +++ b/azurerm/helpers/validate/datafactory.go @@ -28,3 +28,14 @@ func DataFactoryName() schema.SchemaValidateFunc { return warnings, errors } } + +func TriggerDelayTimespan() schema.SchemaValidateFunc { + return func(i interface{}, k string) (warnings []string, errors []error) { + value := i.(string) + if !regexp.MustCompile(`^\-?((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))$`).MatchString(value) { + errors = append(errors, fmt.Errorf("invalid timespan, must be of format hh:mm:ss %q: %q", k, value)) + } + + return warnings, errors + } +} diff --git a/azurerm/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go b/azurerm/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go index 141d74447dc2..c89fb87b2217 100644 --- a/azurerm/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go +++ b/azurerm/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go @@ -2,15 +2,11 @@ package datafactory_test import ( "fmt" - "net/http" "testing" "time" "github.com/hashicorp/terraform-plugin-sdk/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/terraform" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" - "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" ) func TestAccAzureRMDataFactoryTriggerSchedule_basic(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_data_factory_trigger_schedule", "test") resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acceptance.PreCheck(t) }, Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataFactoryTriggerScheduleDestroy, + CheckDestroy: 
testCheckAzureRMDataFactoryTriggerDestroy("azurerm_data_factory_trigger_schedule"), Steps: []resource.TestStep{ { Config: testAccAzureRMDataFactoryTriggerSchedule_basic(data), Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataFactoryTriggerScheduleExists(data.ResourceName), + testCheckAzureRMDataFactoryTriggerExists(data.ResourceName), ), }, data.ImportStep(), @@ -40,19 +36,19 @@ func TestAccAzureRMDataFactoryTriggerSchedule_complete(t *testing.T) { resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acceptance.PreCheck(t) }, Providers: acceptance.SupportedProviders, - CheckDestroy: testCheckAzureRMDataFactoryTriggerScheduleDestroy, + CheckDestroy: testCheckAzureRMDataFactoryTriggerDestroy("azurerm_data_factory_trigger_schedule"), Steps: []resource.TestStep{ { Config: testAccAzureRMDataFactoryTriggerSchedule_basic(data), Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataFactoryTriggerScheduleExists(data.ResourceName), + testCheckAzureRMDataFactoryTriggerExists(data.ResourceName), ), }, data.ImportStep(), { Config: testAccAzureRMDataFactoryTriggerSchedule_update(data, endTime), Check: resource.ComposeTestCheckFunc( - testCheckAzureRMDataFactoryTriggerScheduleExists(data.ResourceName), + testCheckAzureRMDataFactoryTriggerExists(data.ResourceName), ), }, data.ImportStep(), @@ -60,64 +56,6 @@ func TestAccAzureRMDataFactoryTriggerSchedule_complete(t *testing.T) { }) } -func testCheckAzureRMDataFactoryTriggerScheduleExists(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.TriggersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - // Ensure we have enough information in state to look up in API - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Not found: %s", name) - } - - name := rs.Primary.Attributes["name"] - resourceGroup, hasResourceGroup := 
rs.Primary.Attributes["resource_group_name"] - dataFactoryName := rs.Primary.Attributes["data_factory_name"] - if !hasResourceGroup { - return fmt.Errorf("Bad: no resource group found in state for Data Factory: %s", name) - } - - resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - if err != nil { - return fmt.Errorf("Bad: Get on dataFactory.TriggersClient: %+v", err) - } - - if utils.ResponseWasNotFound(resp.Response) { - return fmt.Errorf("Bad: Data Factory Trigger Schdule %q (data factory name: %q / resource group: %q) does not exist", name, dataFactoryName, resourceGroup) - } - - return nil - } -} - -func testCheckAzureRMDataFactoryTriggerScheduleDestroy(s *terraform.State) error { - client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.TriggersClient - ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext - - for _, rs := range s.RootModule().Resources { - if rs.Type != "azurerm_data_factory_trigger_schedule" { - continue - } - - name := rs.Primary.Attributes["name"] - resourceGroup := rs.Primary.Attributes["resource_group_name"] - dataFactoryName := rs.Primary.Attributes["data_factory_name"] - - resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") - - if err != nil { - return nil - } - - if resp.StatusCode != http.StatusNotFound { - return fmt.Errorf("Data Factory Trigger Schedule still exists:\n%#v", resp.Properties) - } - } - - return nil -} - func testAccAzureRMDataFactoryTriggerSchedule_basic(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { diff --git a/azurerm/internal/services/datafactory/data_factory_trigger_test.go b/azurerm/internal/services/datafactory/data_factory_trigger_test.go new file mode 100644 index 000000000000..f60cadad6b4b --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_trigger_test.go @@ -0,0 +1,125 @@ +package datafactory_test + +import ( + "fmt" + "net/http" + + 
"github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func findTriggerByName(name string, s *terraform.State) (*string, *string, *string, error) { + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[name] + if !ok { + return nil, nil, nil, fmt.Errorf("Not found: %s", name) + } + + triggerName := rs.Primary.Attributes["name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + dataFactoryName := rs.Primary.Attributes["data_factory_name"] + if !hasResourceGroup { + return nil, nil, nil, fmt.Errorf("Bad: no resource group found in state for Data Factory: %s", name) + } + + return &triggerName, &dataFactoryName, &resourceGroup, nil +} + +func testCheckAzureRMDataFactoryTriggerStarts(name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.TriggersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + name, dataFactoryName, resourceGroup, err := findTriggerByName(name, s) + if err != nil { + return err + } + + future, err := client.Start(ctx, *resourceGroup, *dataFactoryName, *name) + if err != nil { + return err + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return err + } + + return nil + } +} + +func testCheckAzureRMDataFactoryTriggerStops(name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.TriggersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + name, dataFactoryName, resourceGroup, 
err := findTriggerByName(name, s) + if err != nil { + return err + } + + future, err := client.Stop(ctx, *resourceGroup, *dataFactoryName, *name) + if err != nil { + return err + } + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return err + } + + return nil + } +} + +func testCheckAzureRMDataFactoryTriggerExists(name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.TriggersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + name, dataFactoryName, resourceGroup, err := findTriggerByName(name, s) + if err != nil { + return err + } + + resp, err := client.Get(ctx, *resourceGroup, *dataFactoryName, *name, "") + if err != nil { + return fmt.Errorf("Bad: Get on dataFactory.TriggersClient: %+v", err) + } + + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Data Factory Trigger %q (data factory name: %q / resource group: %q) does not exist", *name, *dataFactoryName, *resourceGroup) + } + + return nil + } +} + +func testCheckAzureRMDataFactoryTriggerDestroy(resource_type string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataFactory.TriggersClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != resource_type { + continue + } + + name := rs.Primary.Attributes["name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + dataFactoryName := rs.Primary.Attributes["data_factory_name"] + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + + if err != nil { + return nil + } + + if resp.StatusCode != http.StatusNotFound { + return fmt.Errorf("Data Factory Trigger still exists:\n%#v", resp.Properties) + } + } + + return nil + } +} diff --git 
a/azurerm/internal/services/datafactory/data_factory_trigger_tumbling_window_resource.go b/azurerm/internal/services/datafactory/data_factory_trigger_tumbling_window_resource.go new file mode 100644 index 000000000000..0f44ad6782bb --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_trigger_tumbling_window_resource.go @@ -0,0 +1,478 @@ +package datafactory + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" + "github.com/Azure/go-autorest/autorest/date" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/suppress" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataFactoryTriggerTumblingWindow() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataFactoryTriggerTumblingWindowCreateUpdate, + Read: resourceArmDataFactoryTriggerTumblingWindowRead, + Update: resourceArmDataFactoryTriggerTumblingWindowCreateUpdate, + Delete: resourceArmDataFactoryTriggerTumblingWindowDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + 
"name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataFactoryPipelineAndTriggerName(), + }, + + // There's a bug in the Azure API where this is returned in lower-case + // BUG: https://github.com/Azure/azure-rest-api-specs/issues/5788 + "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + + "data_factory_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataFactoryName(), + }, + + // This time can only be represented in UTC. + // An issue has been filed in the SDK for the timezone attribute that doesn't seem to work + // https://github.com/Azure/azure-sdk-for-go/issues/6244 + "start_time": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: suppress.RFC3339Time, + ValidateFunc: validation.IsRFC3339Time, + }, + + // This time can only be represented in UTC. + // An issue has been filed in the SDK for the timezone attribute that doesn't seem to work + // https://github.com/Azure/azure-sdk-for-go/issues/6244 + "end_time": { + Type: schema.TypeString, + Optional: true, + DiffSuppressFunc: suppress.RFC3339Time, + ValidateFunc: validation.IsRFC3339Time, + }, + + "frequency": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datafactory.Minute), + string(datafactory.Hour), + string(datafactory.Day), + string(datafactory.Week), + string(datafactory.Month), + }, false), + }, + + "interval": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntAtLeast(1), + }, + + "max_concurrency": { + Type: schema.TypeInt, + Optional: true, + Default: 1, + ValidateFunc: validation.IntAtLeast(1), + }, + + "pipeline_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataFactoryPipelineAndTriggerName(), + }, + + "delay": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: 
validate.TriggerDelayTimespan(), + }, + + "pipeline_parameters": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "trigger_dependency": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "size": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.TriggerDelayTimespan(), + }, + "offset": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.TriggerDelayTimespan(), + }, + "trigger": { + Type: schema.TypeString, + Optional: true, + }, + }, + }, + }, + + "retry": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "count": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntAtLeast(0), + }, + "interval": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntAtLeast(0), + }, + }, + }, + }, + + "annotations": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateFunc: validation.StringIsNotEmpty, + }, + }, + }, + } +} + +func expandTriggerDependencies(d *schema.ResourceData) []datafactory.BasicDependencyReference { + dependencies := d.Get("trigger_dependency").(*schema.Set).List() + var expandedDependencies []datafactory.BasicDependencyReference + + for _, k := range dependencies { + dep := k.(map[string]interface{}) + var trigger interface{} + + if target := dep["trigger"].(string); target != "" { + trigger = &datafactory.TumblingWindowTriggerDependencyReference{ + ReferenceTrigger: &datafactory.TriggerReference{ + ReferenceName: utils.String(target), + Type: utils.String("TriggerReference"), + }, + } + } else { + trigger = &datafactory.SelfDependencyTumblingWindowTriggerReference{} + } + + var offset, size *string + if v := dep["offset"].(string); v != "" { + offset = utils.String(v) + } + if v := dep["size"].(string); v != "" { + size = 
utils.String(v) + } + + var basicDependency datafactory.BasicDependencyReference + switch trigger := trigger.(type) { + case *datafactory.TumblingWindowTriggerDependencyReference: + trigger.Offset = offset + trigger.Size = size + basicDependency, _ = trigger.AsBasicDependencyReference() + case *datafactory.SelfDependencyTumblingWindowTriggerReference: + trigger.Offset = offset + trigger.Size = size + basicDependency, _ = trigger.AsBasicDependencyReference() + } + expandedDependencies = append(expandedDependencies, basicDependency) + } + + return expandedDependencies +} + +func flattenTriggerrDependencies(depRefs *[]datafactory.BasicDependencyReference) []interface{} { + outputs := make([]interface{}, 0) + for _, v := range *depRefs { + var size, offset, trigger = "", "", "" + var p_size, p_offset *string + if t, ok := v.AsSelfDependencyTumblingWindowTriggerReference(); ok { + p_size = t.Size + p_offset = t.Offset + trigger = "" + } else if t, ok := v.AsTumblingWindowTriggerDependencyReference(); ok { + p_size = t.Size + p_offset = t.Offset + trigger = *t.ReferenceTrigger.ReferenceName + } + + if p_size != nil { + size = *p_size + } + + if p_offset != nil { + offset = *p_offset + } + outputs = append(outputs, map[string]interface{}{ + "size": size, + "offset": offset, + "trigger": trigger, + }) + } + + return outputs +} + +func flattenRetryPolicy(r *datafactory.RetryPolicy) []interface{} { + /* + Sometimes the API returns RetryPolicy = nil sometimes it + returns RetryPolicy = RetryPolicy{Count=nil, IntervalInSeconds=nil} + in either case leaver retry as nil + */ + if r.Count == nil && r.IntervalInSeconds == nil { + return nil + } + + var count float64 = 0 + if r.Count != nil { + count = r.Count.(float64) + } + + var interval int32 = 0 + if r.IntervalInSeconds != nil { + interval = *r.IntervalInSeconds + } + + retry := map[string]interface{}{ + "count": int(count), + "interval": interval, + } + return []interface{}{retry} +} + +func expandRetryPolicy(d 
*schema.ResourceData) *datafactory.RetryPolicy { + policy := &datafactory.RetryPolicy{} + if v, ok := d.GetOk("retry.0.count"); ok { + policy.Count = utils.Int32(int32(v.(int))) + } + + if v, ok := d.GetOk("retry.0.interval"); ok { + policy.IntervalInSeconds = utils.Int32(int32(v.(int))) + } + + return policy +} + +func resourceArmDataFactoryTriggerTumblingWindowCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.TriggersClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + log.Printf("[INFO] preparing arguments for Data Factory Trigger Tumbling Window creation.") + + resourceGroupName := d.Get("resource_group_name").(string) + triggerName := d.Get("name").(string) + dataFactoryName := d.Get("data_factory_name").(string) + + if d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroupName, dataFactoryName, triggerName, "") + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Data Factory Trigger Tumbling Window %q (Resource Group %q / Data Factory %q): %s", triggerName, resourceGroupName, dataFactoryName, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_data_factory_trigger_tumbling_window", *existing.ID) + } + } + + props := &datafactory.TumblingWindowTriggerTypeProperties{ + Frequency: datafactory.TumblingWindowFrequency(d.Get("frequency").(string)), + Interval: utils.Int32(int32(d.Get("interval").(int))), + } + + if v, ok := d.GetOk("delay"); ok { + props.Delay = datafactory.TumblingWindowFrequency(v.(string)) + } + + if v, ok := d.GetOk("max_concurrency"); ok { + props.MaxConcurrency = utils.Int32(int32(v.(int))) + } + + if _, ok := d.GetOk("trigger_dependency"); ok { + deps := expandTriggerDependencies(d) + props.DependsOn = &deps + } + + if _, ok := d.GetOk("retry"); ok { + props.RetryPolicy = 
expandRetryPolicy(d) + } + + if v, ok := d.GetOk("start_time"); ok { + t, _ := time.Parse(time.RFC3339, v.(string)) // should be validated by the schema + props.StartTime = &date.Time{Time: t} + } else { + props.StartTime = &date.Time{Time: time.Now()} + } + + if v, ok := d.GetOk("end_time"); ok { + t, _ := time.Parse(time.RFC3339, v.(string)) // should be validated by the schema + props.EndTime = &date.Time{Time: t} + } + + triggerProps := &datafactory.TumblingWindowTrigger{ + TumblingWindowTriggerTypeProperties: props, + Pipeline: &datafactory.TriggerPipelineReference{ + PipelineReference: &datafactory.PipelineReference{ + ReferenceName: utils.String(d.Get("pipeline_name").(string)), + Type: utils.String("PipelineReference"), + }, + Parameters: d.Get("pipeline_parameters").(map[string]interface{}), + }, + } + + if v, ok := d.GetOk("annotations"); ok { + annotations := v.([]interface{}) + triggerProps.Annotations = &annotations + } + + trigger := datafactory.TriggerResource{ + Properties: triggerProps, + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroupName, dataFactoryName, triggerName, trigger, ""); err != nil { + return fmt.Errorf("Error creating Data Factory Trigger Tumbling Window %q (Resource Group %q / Data Factory %q): %+v", triggerName, resourceGroupName, dataFactoryName, err) + } + + read, err := client.Get(ctx, resourceGroupName, dataFactoryName, triggerName, "") + if err != nil { + return fmt.Errorf("Error retrieving Data Factory Trigger Tumbling Window %q (Resource Group %q / Data Factory %q): %+v", triggerName, resourceGroupName, dataFactoryName, err) + } + + if read.ID == nil { + return fmt.Errorf("Cannot read Data Factory Trigger Tumbling Window %q (Resource Group %q / Data Factory %q) ID", triggerName, resourceGroupName, dataFactoryName) + } + + d.SetId(*read.ID) + + return resourceArmDataFactoryTriggerTumblingWindowRead(d, meta) +} + +func resourceArmDataFactoryTriggerTumblingWindowRead(d *schema.ResourceData, meta interface{}) error { 
+ client := meta.(*clients.Client).DataFactory.TriggersClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + dataFactoryName := id.Path["factories"] + triggerName := id.Path["triggers"] + + resp, err := client.Get(ctx, id.ResourceGroup, dataFactoryName, triggerName, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + log.Printf("[DEBUG] Data Factory Trigger Tumbling Window %q was not found in Resource Group %q - removing from state!", triggerName, id.ResourceGroup) + return nil + } + return fmt.Errorf("Error reading the state of Data Factory Trigger Tumbling Window %q: %+v", triggerName, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("data_factory_name", dataFactoryName) + + tumblingTrigger, ok := resp.Properties.AsTumblingWindowTrigger() + if !ok { + return fmt.Errorf("Error classifiying Data Factory Trigger Tumbling Window %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", triggerName, dataFactoryName, id.ResourceGroup, datafactory.TypeTumblingWindowTrigger, *resp.Type) + } + + if tumblingTrigger != nil { + if tumblingTriggerProps := tumblingTrigger.TumblingWindowTriggerTypeProperties; tumblingTriggerProps != nil { + if v := tumblingTriggerProps.StartTime; v != nil { + d.Set("start_time", v.Format(time.RFC3339)) + } + if v := tumblingTriggerProps.EndTime; v != nil { + d.Set("end_time", v.Format(time.RFC3339)) + } + d.Set("frequency", tumblingTriggerProps.Frequency) + d.Set("interval", tumblingTriggerProps.Interval) + d.Set("max_concurrency", tumblingTriggerProps.MaxConcurrency) + d.Set("delay", tumblingTriggerProps.Delay) + + if v := tumblingTriggerProps.RetryPolicy; v != nil { + d.Set("retry", flattenRetryPolicy(v)) + } + + if v := tumblingTriggerProps.DependsOn; v != nil { + d.Set("trigger_dependency", 
flattenTriggerrDependencies(v)) + } + } + + if pipeline := tumblingTrigger.Pipeline; pipeline != nil { + if reference := pipeline.PipelineReference; reference != nil { + d.Set("pipeline_name", reference.ReferenceName) + } + d.Set("pipeline_parameters", pipeline.Parameters) + } + + annotations := flattenDataFactoryAnnotations(tumblingTrigger.Annotations) + if err := d.Set("annotations", annotations); err != nil { + return fmt.Errorf("Error setting `annotations`: %+v", err) + } + } + + return nil +} + +func resourceArmDataFactoryTriggerTumblingWindowDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataFactory.TriggersClient + ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := azure.ParseAzureResourceID(d.Id()) + if err != nil { + return err + } + dataFactoryName := id.Path["factories"] + triggerName := id.Path["triggers"] + + if _, err = client.Delete(ctx, id.ResourceGroup, dataFactoryName, triggerName); err != nil { + return fmt.Errorf("Error deleting Data Factory Trigger Tumbling Window %q (Resource Group %q / Data Factory %q): %+v", triggerName, id.ResourceGroup, dataFactoryName, err) + } + + return nil +} diff --git a/azurerm/internal/services/datafactory/data_factory_trigger_tumbling_window_resource_test.go b/azurerm/internal/services/datafactory/data_factory_trigger_tumbling_window_resource_test.go new file mode 100644 index 000000000000..0085737dd41e --- /dev/null +++ b/azurerm/internal/services/datafactory/data_factory_trigger_tumbling_window_resource_test.go @@ -0,0 +1,300 @@ +package datafactory_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccAzureRMDataFactoryTriggerTumblingWindow_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_trigger_tumbling_window", "test") + 
+ resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataFactoryTriggerDestroy("azurerm_data_factory_trigger_tumbling_window"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryTriggerExists(data.ResourceName), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataFactoryTriggerTumblingWindow_startstop(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_trigger_tumbling_window", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataFactoryTriggerDestroy("azurerm_data_factory_trigger_tumbling_window"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryTriggerExists(data.ResourceName), + testCheckAzureRMDataFactoryTriggerStarts(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_basic(data), + ExpectNonEmptyPlan: false, + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryTriggerStops(data.ResourceName), + ), + }, + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_basic(data), + ExpectNonEmptyPlan: false, + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataFactoryTriggerTumblingWindow_trigger_dependency(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_trigger_tumbling_window", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: 
testCheckAzureRMDataFactoryTriggerDestroy("azurerm_data_factory_trigger_tumbling_window"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_trigger_dependency(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryTriggerExists(data.ResourceName), + testCheckAzureRMDataFactoryTriggerStarts(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_trigger_dependency(data), + ExpectNonEmptyPlan: false, + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryTriggerStops(data.ResourceName), + ), + }, + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_trigger_dependency(data), + ExpectNonEmptyPlan: false, + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataFactoryTriggerTumblingWindow_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_trigger_tumbling_window", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataFactoryTriggerDestroy("azurerm_data_factory_trigger_tumbling_window"), + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryTriggerExists(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_update(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryTriggerExists(data.ResourceName), + testCheckAzureRMDataFactoryTriggerStarts(data.ResourceName), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMDataFactoryTriggerTumblingWindow_update(data), + ExpectNonEmptyPlan: false, + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryTriggerStops(data.ResourceName), + ), + }, + { + Config: 
testAccAzureRMDataFactoryTriggerTumblingWindow_update(data), + ExpectNonEmptyPlan: false, + }, + data.ImportStep(), + }, + }) +} + +func testAccAzureRMDataFactoryTriggerTumblingWindow_basic(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_pipeline" "test" { + name = "acctest%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + + parameters = { + test = "testparameter" + } +} + +resource "azurerm_data_factory_trigger_tumbling_window" "test" { + name = "acctestdf%d" + data_factory_name = azurerm_data_factory.test.name + resource_group_name = azurerm_resource_group.test.name + pipeline_name = azurerm_data_factory_pipeline.test.name + + start_time = "2020-09-21T00:00:00Z" + interval = 24 + frequency = "Hour" + + annotations = ["test1", "test2", "test3"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMDataFactoryTriggerTumblingWindow_update(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_pipeline" "test" { + name = "acctest%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + + parameters = { + test = "testparameter" + } +} + +resource 
"azurerm_data_factory_trigger_tumbling_window" "test" { + name = "acctestdf%d" + data_factory_name = azurerm_data_factory.test.name + resource_group_name = azurerm_resource_group.test.name + + pipeline_parameters = { + test = "@{formatDateTime(trigger().outputs.windowStartTime,'yyyy-MM-dd')}" + } + + pipeline_name = azurerm_data_factory_pipeline.test.name + + interval = 24 + frequency = "Hour" + max_concurrency = 3 + start_time = "2020-09-21T00:00:00Z" + end_time = "2020-10-21T00:00:00Z" + delay = "16:00:00" + + trigger_dependency { + size = "24:00:00" + offset = "-24:00:00" + } + + retry { + count = 3 + interval = 60 + } + + annotations = ["test1", "test2", "test3"] +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} + +func testAccAzureRMDataFactoryTriggerTumblingWindow_trigger_dependency(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctestRG-df-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name +} + +resource "azurerm_data_factory_pipeline" "test" { + name = "acctest%d" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + + parameters = { + test = "testparameter" + } +} + +resource "azurerm_data_factory_pipeline" "test2" { + name = "acctest%d-2" + resource_group_name = azurerm_resource_group.test.name + data_factory_name = azurerm_data_factory.test.name + + parameters = { + test = "testparameter" + } +} + +resource "azurerm_data_factory_trigger_tumbling_window" "test2" { + name = "acctesttr%d-2" + data_factory_name = azurerm_data_factory.test.name + resource_group_name = azurerm_resource_group.test.name + pipeline_name = azurerm_data_factory_pipeline.test2.name + + start_time = 
"2020-09-21T00:00:00Z" + interval = 24 + frequency = "Hour" + +} + +resource "azurerm_data_factory_trigger_tumbling_window" "test" { + name = "acctesttr%d" + data_factory_name = azurerm_data_factory.test.name + resource_group_name = azurerm_resource_group.test.name + pipeline_name = azurerm_data_factory_pipeline.test.name + + start_time = "2020-09-21T00:00:00Z" + interval = 24 + frequency = "Hour" + + trigger_dependency { + trigger = azurerm_data_factory_trigger_tumbling_window.test2.name + } + +} +`, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger) +} diff --git a/azurerm/internal/services/datafactory/registration.go b/azurerm/internal/services/datafactory/registration.go index 57c431e172a7..91217ee7411c 100644 --- a/azurerm/internal/services/datafactory/registration.go +++ b/azurerm/internal/services/datafactory/registration.go @@ -53,5 +53,6 @@ func (r Registration) SupportedResources() map[string]*schema.Resource { "azurerm_data_factory_linked_service_web": resourceArmDataFactoryLinkedServiceWeb(), "azurerm_data_factory_pipeline": resourceArmDataFactoryPipeline(), "azurerm_data_factory_trigger_schedule": resourceArmDataFactoryTriggerSchedule(), + "azurerm_data_factory_trigger_tumbling_window": resourceArmDataFactoryTriggerTumblingWindow(), } } diff --git a/website/docs/r/data_factory_trigger_tumbling_window.html.markdown b/website/docs/r/data_factory_trigger_tumbling_window.html.markdown new file mode 100644 index 000000000000..9bab1ed30a51 --- /dev/null +++ b/website/docs/r/data_factory_trigger_tumbling_window.html.markdown @@ -0,0 +1,133 @@ +--- +subcategory: "Data Factory" +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_factory_trigger_tumbling_window" +description: |- + Manages a Tumbling Window Trigger. +--- + +# azurerm_data_factory_trigger_tumbling_window + +Manages a Tumbling Window Trigger inside an Azure Data Factory. 
+ +Data Factory triggers are created in the "Stopped" state and must be manually enabled to start triggering. The API prevents updates to triggers in the "Started" state, so they must be manually paused before running `terraform apply`. + +## Example Usage + +```hcl + +resource "azurerm_data_factory_trigger_tumbling_window" "example" { + name = "example" + data_factory_name = "example" + resource_group_name = "example" + + pipeline_parameters = { + example = "@{formatDateTime(trigger().outputs.windowStartTime,'yyyy-MM-dd')}" + } + + pipeline_name = "example" + + interval = 24 + frequency = "Hour" + max_concurrency = 3 + start_time = "2020-09-21T00:00:00Z" + end_time = "2020-10-21T00:00:00Z" + delay = "16:00:00" + + // Self dependency + trigger_dependency { + size = "24:00:00" + offset = "-24:00:00" + } + + trigger_dependency { + size = "06:00:00" + offset = "06:00:00" + trigger = "anotherTrigger" + } + + retry { + count = 3 + interval = 60 + } + +} + +``` + +## Arguments Reference + +The following arguments are supported: + +* `name` - (Required) Specifies the name of the Trigger. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions. + +* `resource_group_name` - (Required) The name of the Resource Group where the Tumbling Window Trigger should exist. Changing this forces a new Tumbling Window Trigger to be created. + +* `data_factory_name` - (Required) The name of the Azure Data Factory to associate the Trigger with. Changing this forces a new Tumbling Window Trigger to be created. + +* `pipeline_name` - (Required) The name of the pipeline to be triggered; there is a one-to-one relationship between pipelines and Tumbling Window Triggers. + +* `start_time` - (Required) The first occurrence, which can be in the past. The first trigger interval is (startTime, startTime + interval). 
Changing this forces a new Tumbling Window Trigger to be created. Must be in RFC3339 format, e.g. "2020-09-21T00:00:00Z". + +* `frequency` - (Required) The trigger frequency. Valid values include `Minute`, `Hour`, `Day`, `Week`, `Month`. Changing this forces a new Tumbling Window Trigger to be created. + +* `interval` - (Required) The interval of the tumbling window. Changing this forces a new Tumbling Window Trigger to be created. + +--- + +* `annotations` - (Optional) List of tags that can be used for describing the Trigger. + +* `delay` - (Optional) The amount of time to delay the start of data processing for the window. The pipeline run is started after the expected execution time plus the amount of delay. The delay defines how long the trigger waits past the due time before triggering a new run. The delay doesn’t alter the window startTime. Must be in Timespan format (hh:mm:ss). + +* `end_time` - (Optional) The last occurrence, which can be in the past. Must be in RFC3339 format, e.g. "2020-09-21T00:00:00Z". + +* `max_concurrency` - (Optional) The number of simultaneous trigger runs that are fired for windows that are ready. + +* `pipeline_parameters` - (Optional) The pipeline parameters that the trigger will act upon. + +* `retry` - (Optional) A `retry` block as defined below. + +* `trigger_dependency` - (Optional) One or more `trigger_dependency` blocks as defined below. + +--- + +A `retry` block supports the following: + +* `count` - (Required) The number of retries before the pipeline run is marked as "Failed". + +* `interval` - (Optional) The delay between retry attempts, specified in seconds. + +--- + +A `trigger_dependency` block supports the following: + +* `offset` - (Optional) The offset of the dependency trigger. Must be in Timespan format (±hh:mm:ss) and must be a negative offset for a self dependency. + +* `size` - (Optional) The size of the dependency tumbling window. Must be in Timespan format (hh:mm:ss). 
 + +* `trigger` - (Optional) The name of the trigger to depend on. If not provided, a `SelfDependencyTumblingWindowTriggerReference` is created, in which case `offset` must be negative. + +Up to 5 `trigger_dependency` blocks may be specified in total, with at most 1 self dependency. + +## Attributes Reference + +In addition to the Arguments listed above - the following Attributes are exported: + +* `id` - The ID of the Tumbling Window Trigger. + +## Timeouts + +The `timeouts` block allows you to specify [timeouts](https://www.terraform.io/docs/configuration/resources.html#timeouts) for certain actions: + +* `create` - (Defaults to 30 minutes) Used when creating the Tumbling Window Trigger. +* `read` - (Defaults to 5 minutes) Used when retrieving the Tumbling Window Trigger. +* `update` - (Defaults to 30 minutes) Used when updating the Tumbling Window Trigger. +* `delete` - (Defaults to 30 minutes) Used when deleting the Tumbling Window Trigger. + +## Import + +Tumbling Window Triggers can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_data_factory_trigger_tumbling_window.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/ResourceGroupName/providers/Microsoft.DataFactory/factories/DataFactoryName/triggers/TriggerName +```