data_factory_trigger_schedule - pipeline (#16922)
qiqingzhang authored Jun 1, 2022
1 parent db80de2 commit 81d2806
Showing 3 changed files with 181 additions and 25 deletions.
internal/services/datafactory/data_factory_trigger_schedule_resource.go (123 changes: 100 additions & 23 deletions)
@@ -185,15 +185,44 @@ func resourceDataFactoryTriggerSchedule() *pluginsdk.Resource {
				Default: true,
			},

"pipeline": {
Type: pluginsdk.TypeList,
Optional: true,
Computed: true,
ConflictsWith: []string{"pipeline_parameters"},
ExactlyOneOf: []string{"pipeline", "pipeline_name"},
Elem: &pluginsdk.Resource{
Schema: map[string]*pluginsdk.Schema{
"name": {
Type: pluginsdk.TypeString,
Required: true,
ValidateFunc: validate.DataFactoryPipelineAndTriggerName(),
},

"parameters": {
Type: pluginsdk.TypeMap,
Optional: true,
Elem: &pluginsdk.Schema{
Type: pluginsdk.TypeString,
},
},
},
},
},

"pipeline_name": {
Type: pluginsdk.TypeString,
Required: true,
Optional: true,
Computed: true,
ExactlyOneOf: []string{"pipeline", "pipeline_name"},
ValidateFunc: validate.DataFactoryPipelineAndTriggerName(),
},

"pipeline_parameters": {
Type: pluginsdk.TypeMap,
Optional: true,
Type: pluginsdk.TypeMap,
Optional: true,
Computed: true,
ConflictsWith: []string{"pipeline"},
Elem: &pluginsdk.Schema{
Type: pluginsdk.TypeString,
},
@@ -257,20 +286,23 @@ func resourceDataFactoryTriggerScheduleCreate(d *pluginsdk.ResourceData, meta in
		props.Recurrence.EndTime = &date.Time{Time: t}
	}

	scheduleProps := &datafactory.ScheduleTrigger{
		ScheduleTriggerTypeProperties: props,
		Description:                   utils.String(d.Get("description").(string)),
	}

	if pipelineName := d.Get("pipeline_name").(string); len(pipelineName) != 0 {
		scheduleProps.Pipelines = &[]datafactory.TriggerPipelineReference{
			{
				PipelineReference: &datafactory.PipelineReference{
					ReferenceName: utils.String(pipelineName),
					Type:          utils.String("PipelineReference"),
				},
				Parameters: d.Get("pipeline_parameters").(map[string]interface{}),
			},
		}
	} else {
		scheduleProps.Pipelines = expandDataFactoryPipelines(d.Get("pipeline").([]interface{}))
	}

	if v, ok := d.GetOk("annotations"); ok {
@@ -347,20 +379,25 @@ func resourceDataFactoryTriggerScheduleUpdate(d *pluginsdk.ResourceData, meta in
		props.Recurrence.EndTime = &date.Time{Time: t}
	}

	scheduleProps := &datafactory.ScheduleTrigger{
		ScheduleTriggerTypeProperties: props,
		Description:                   utils.String(d.Get("description").(string)),
	}

	pipelineName := d.Get("pipeline_name").(string)
	pipeline := d.Get("pipeline").([]interface{})
	if (d.HasChange("pipeline_name") && len(pipelineName) == 0) || (d.HasChange("pipeline") && len(pipeline) != 0) {
		scheduleProps.Pipelines = expandDataFactoryPipelines(pipeline)
	} else {
		scheduleProps.Pipelines = &[]datafactory.TriggerPipelineReference{
			{
				PipelineReference: &datafactory.PipelineReference{
					ReferenceName: utils.String(pipelineName),
					Type:          utils.String("PipelineReference"),
				},
				Parameters: d.Get("pipeline_parameters").(map[string]interface{}),
			},
		}
	}

	if v, ok := d.GetOk("annotations"); ok {
@@ -445,6 +482,7 @@ func resourceDataFactoryTriggerScheduleRead(d *pluginsdk.ResourceData, meta inte
				d.Set("pipeline_name", reference.ReferenceName)
			}
			d.Set("pipeline_parameters", pipeline[0].Parameters)
			d.Set("pipeline", flattenDataFactoryPipelines(pipelines))
		}
	}

@@ -560,3 +598,42 @@ func flattenDataFactorySchedule(schedule *datafactory.RecurrenceSchedule) []inte
	}
	return []interface{}{value}
}

// expandDataFactoryPipelines maps the config's "pipeline" blocks to SDK trigger pipeline references.
func expandDataFactoryPipelines(input []interface{}) *[]datafactory.TriggerPipelineReference {
	if len(input) == 0 {
		return nil
	}

	pipes := make([]datafactory.TriggerPipelineReference, 0)

	for _, item := range input {
		config := item.(map[string]interface{})
		v := datafactory.TriggerPipelineReference{
			PipelineReference: &datafactory.PipelineReference{
				ReferenceName: utils.String(config["name"].(string)),
				Type:          utils.String("PipelineReference"),
			},
			Parameters: config["parameters"].(map[string]interface{}),
		}
		pipes = append(pipes, v)
	}

	return &pipes
}

func flattenDataFactoryPipelines(pipelines *[]datafactory.TriggerPipelineReference) interface{} {
	if pipelines == nil {
		return []interface{}{}
	}

	res := make([]interface{}, 0)

	for _, item := range *pipelines {
		v := make(map[string]interface{})
		// Guard against nil references and store the plain string value,
		// not a *string, so it can be written into state.
		if item.PipelineReference != nil && item.PipelineReference.ReferenceName != nil {
			v["name"] = *item.PipelineReference.ReferenceName
		}
		v["parameters"] = item.Parameters
		res = append(res, v)
	}

	// Return the slice itself rather than a pointer so d.Set can consume it.
	return res
}
internal/services/datafactory/data_factory_trigger_schedule_resource_test.go
@@ -30,6 +30,21 @@ func TestAccDataFactoryTriggerSchedule_basic(t *testing.T) {
	})
}

func TestAccDataFactoryTriggerSchedule_pipeline(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_data_factory_trigger_schedule", "test")
	r := TriggerScheduleResource{}

	data.ResourceTest(t, r, []acceptance.TestStep{
		{
			Config: r.pipeline(data),
			Check: acceptance.ComposeTestCheckFunc(
				check.That(data.ResourceName).ExistsInAzure(r),
			),
		},
		data.ImportStep(),
	})
}

func TestAccDataFactoryTriggerSchedule_complete(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_data_factory_trigger_schedule", "test")
	r := TriggerScheduleResource{}
@@ -132,6 +147,60 @@ resource "azurerm_data_factory_trigger_schedule" "test" {
`, data.RandomInteger, data.Locations.Primary)
}

func (TriggerScheduleResource) pipeline(data acceptance.TestData) string {
	return fmt.Sprintf(`
provider "azurerm" {
  features {}
}

resource "azurerm_resource_group" "test" {
  name     = "acctestRG-df-%[1]d"
  location = "%s"
}

resource "azurerm_data_factory" "test" {
  name                = "acctestdf%[1]d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}

resource "azurerm_data_factory_pipeline" "test1" {
  name            = "acctest%[1]d"
  data_factory_id = azurerm_data_factory.test.id
  parameters = {
    test = "testparameter1"
  }
}

resource "azurerm_data_factory_pipeline" "test2" {
  name            = "acctests%[1]d"
  data_factory_id = azurerm_data_factory.test.id
  parameters = {
    test = "testparameter2"
  }
}

resource "azurerm_data_factory_trigger_schedule" "test" {
  name            = "acctestdf%[1]d"
  data_factory_id = azurerm_data_factory.test.id

  pipeline {
    name       = azurerm_data_factory_pipeline.test1.name
    parameters = azurerm_data_factory_pipeline.test1.parameters
  }

  pipeline {
    name       = azurerm_data_factory_pipeline.test2.name
    parameters = azurerm_data_factory_pipeline.test2.parameters
  }

  annotations = ["test1", "test2", "test3"]
}
`, data.RandomInteger, data.Locations.Primary)
}

func (TriggerScheduleResource) update(data acceptance.TestData) string {
	return fmt.Sprintf(`
provider "azurerm" {
website/docs/r/data_factory_trigger_schedule.html.markdown (14 changes: 12 additions & 2 deletions)
@@ -48,8 +48,6 @@ The following arguments are supported:

* `data_factory_id` - (Required) The ID of the Data Factory in which to create the Trigger. Changing this forces a new resource.

* `description` - (Optional) The Schedule Trigger's description.

* `schedule` - (Optional) A `schedule` block as defined below, which further specifies the recurrence schedule for the trigger. A schedule is capable of limiting or increasing the number of trigger executions specified by the `frequency` and `interval` properties.
@@ -66,6 +64,10 @@ The following arguments are supported:

* `activated` - (Optional) Specifies if the Data Factory Schedule Trigger is activated. Defaults to `true`.

* `pipeline` - (Optional) A `pipeline` block as defined below. Exactly one of `pipeline` or `pipeline_name` must be specified.

* `pipeline_name` - (Optional) The Data Factory Pipeline name that the trigger will act on.

* `pipeline_parameters` - (Optional) The pipeline parameters that the trigger will act upon.

* `annotations` - (Optional) List of tags that can be used for describing the Data Factory Schedule Trigger.
@@ -92,6 +94,14 @@ A `monthly` block supports the following:

* `week` - (Optional) The occurrence of the specified day during the month. For example, a `monthly` property with `weekday` and `week` values of `Sunday, -1` means the last Sunday of the month, as in the sketch below.
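
As a brief sketch of that last example (the `azurerm_data_factory.example` and `azurerm_data_factory_pipeline.example` resources are illustrative and assumed to exist elsewhere in the configuration):

```hcl
resource "azurerm_data_factory_trigger_schedule" "last_sunday" {
  name            = "example-trigger"
  data_factory_id = azurerm_data_factory.example.id
  pipeline_name   = azurerm_data_factory_pipeline.example.name
  frequency       = "Month"

  schedule {
    monthly {
      weekday = "Sunday"
      week    = -1
    }
  }
}
```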

---

A `pipeline` block supports the following:

* `name` - (Required) The name of the Data Factory Pipeline that the trigger will act on.

* `parameters` - (Optional) The pipeline parameters that the trigger will act upon. A usage sketch follows below.
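
As a minimal sketch of the multi-pipeline form (the resource names and referenced pipelines are illustrative and assumed to exist):

```hcl
resource "azurerm_data_factory_trigger_schedule" "example" {
  name            = "example-trigger"
  data_factory_id = azurerm_data_factory.example.id

  # Each pipeline block references one pipeline to run, with optional parameters.
  pipeline {
    name = azurerm_data_factory_pipeline.first.name
    parameters = {
      environment = "test"
    }
  }

  pipeline {
    name = azurerm_data_factory_pipeline.second.name
  }
}
```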

## Attributes Reference

The following attributes are exported:
