diff --git a/azurerm/config.go b/azurerm/config.go index 7b00a91f1c34..d669b68bc3d9 100644 --- a/azurerm/config.go +++ b/azurerm/config.go @@ -226,6 +226,7 @@ type ArmClient struct { sqlVirtualNetworkRulesClient sql.VirtualNetworkRulesClient // Data Factory + dataFactoryPipelineClient datafactory.PipelinesClient dataFactoryClient datafactory.FactoriesClient dataFactoryDatasetClient datafactory.DatasetsClient dataFactoryLinkedServiceClient datafactory.LinkedServicesClient @@ -899,6 +900,10 @@ func (c *ArmClient) registerDataFactoryClients(endpoint, subscriptionId string, dataFactoryLinkedServiceClient := datafactory.NewLinkedServicesClientWithBaseURI(endpoint, subscriptionId) c.configureClient(&dataFactoryLinkedServiceClient.Client, auth) c.dataFactoryLinkedServiceClient = dataFactoryLinkedServiceClient + + dataFactoryPipelineClient := datafactory.NewPipelinesClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&dataFactoryPipelineClient.Client, auth) + c.dataFactoryPipelineClient = dataFactoryPipelineClient } func (c *ArmClient) registerDataLakeStoreClients(endpoint, subscriptionId string, auth autorest.Authorizer) { diff --git a/azurerm/data_factory.go b/azurerm/data_factory.go index 48db9dbf683c..5a2de0759b00 100644 --- a/azurerm/data_factory.go +++ b/azurerm/data_factory.go @@ -52,3 +52,34 @@ func flattenDataFactoryAnnotations(input *[]interface{}) []string { } return annotations } + +func expandDataFactoryVariables(input map[string]interface{}) map[string]*datafactory.VariableSpecification { + output := make(map[string]*datafactory.VariableSpecification) + + for k, v := range input { + output[k] = &datafactory.VariableSpecification{ + Type: datafactory.VariableTypeString, + DefaultValue: v.(string), + } + } + + return output +} + +func flattenDataFactoryVariables(input map[string]*datafactory.VariableSpecification) map[string]interface{} { + output := make(map[string]interface{}) + + for k, v := range input { + if v != nil { + // we only support 
string parameters at this time + val, ok := v.DefaultValue.(string) + if !ok { + log.Printf("[DEBUG] Skipping variable %q since it's not a string", k) + } + + output[k] = val + } + } + + return output +} diff --git a/azurerm/provider.go b/azurerm/provider.go index 7aa0cf949e56..80f873c86754 100644 --- a/azurerm/provider.go +++ b/azurerm/provider.go @@ -222,6 +222,7 @@ func Provider() terraform.ResourceProvider { "azurerm_data_factory": resourceArmDataFactory(), "azurerm_data_factory_dataset_sql_server_table": resourceArmDataFactoryDatasetSQLServerTable(), "azurerm_data_factory_linked_service_sql_server": resourceArmDataFactoryLinkedServiceSQLServer(), + "azurerm_data_factory_pipeline": resourceArmDataFactoryPipeline(), "azurerm_data_lake_analytics_account": resourceArmDataLakeAnalyticsAccount(), "azurerm_data_lake_analytics_firewall_rule": resourceArmDataLakeAnalyticsFirewallRule(), "azurerm_data_lake_store_file": resourceArmDataLakeStoreFile(), diff --git a/azurerm/resource_arm_data_factory_pipeline.go b/azurerm/resource_arm_data_factory_pipeline.go new file mode 100644 index 000000000000..f8db579821ec --- /dev/null +++ b/azurerm/resource_arm_data_factory_pipeline.go @@ -0,0 +1,205 @@ +package azurerm + +import ( + "fmt" + "log" + "regexp" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" + "github.com/hashicorp/terraform/helper/schema" + "github.com/hashicorp/terraform/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataFactoryPipeline() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataFactoryPipelineCreateUpdate, + Read: resourceArmDataFactoryPipelineRead, + Update: resourceArmDataFactoryPipelineCreateUpdate, + Delete: resourceArmDataFactoryPipelineDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Schema: 
map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validateAzureRMDataFactoryPipelineName, + }, + + "data_factory_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile(`^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$`), + `Invalid data_factory_name, see https://docs.microsoft.com/en-us/azure/data-factory/naming-rules`, + ), + }, + + "resource_group_name": resourceGroupNameSchema(), + + "parameters": { + Type: schema.TypeMap, + Optional: true, + }, + + "variables": { + Type: schema.TypeMap, + Optional: true, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + }, + + "annotations": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + } +} + +func resourceArmDataFactoryPipelineCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataFactoryPipelineClient + ctx := meta.(*ArmClient).StopContext + + log.Printf("[INFO] preparing arguments for Data Factory Pipeline creation.") + + resourceGroupName := d.Get("resource_group_name").(string) + name := d.Get("name").(string) + dataFactoryName := d.Get("data_factory_name").(string) + + if requireResourcesToBeImported && d.IsNewResource() { + existing, err := client.Get(ctx, resourceGroupName, dataFactoryName, name, "") + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("Error checking for presence of existing Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %s", name, resourceGroupName, dataFactoryName, err) + } + } + + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_data_factory_pipeline", *existing.ID) + } + } + + description := d.Get("description").(string) + pipeline := &datafactory.Pipeline{ + Parameters: expandDataFactoryParameters(d.Get("parameters").(map[string]interface{})), + 
Variables: expandDataFactoryVariables(d.Get("variables").(map[string]interface{})), + Description: &description, + } + + if v, ok := d.GetOk("annotations"); ok { + annotations := v.([]interface{}) + pipeline.Annotations = &annotations + } else { + annotations := make([]interface{}, 0) + pipeline.Annotations = &annotations + } + + config := datafactory.PipelineResource{ + Pipeline: pipeline, + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroupName, dataFactoryName, name, config, ""); err != nil { + return fmt.Errorf("Error creating Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %+v", name, resourceGroupName, dataFactoryName, err) + } + + read, err := client.Get(ctx, resourceGroupName, dataFactoryName, name, "") + if err != nil { + return fmt.Errorf("Error retrieving Data Factory Pipeline %q (Resource Group %q / Data Factory %q): %+v", name, resourceGroupName, dataFactoryName, err) + } + + if read.ID == nil { + return fmt.Errorf("Cannot read Data Factory Pipeline %q (Resource Group %q / Data Factory %q) ID", name, resourceGroupName, dataFactoryName) + } + + d.SetId(*read.ID) + + return resourceArmDataFactoryPipelineRead(d, meta) +} + +func resourceArmDataFactoryPipelineRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataFactoryPipelineClient + ctx := meta.(*ArmClient).StopContext + + id, err := parseAzureResourceID(d.Id()) + if err != nil { + return err + } + dataFactoryName := id.Path["factories"] + name := id.Path["pipelines"] + + resp, err := client.Get(ctx, id.ResourceGroup, dataFactoryName, name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + d.SetId("") + log.Printf("[DEBUG] Data Factory Pipeline %q was not found in Resource Group %q - removing from state!", name, id.ResourceGroup) + return nil + } + return fmt.Errorf("Error reading the state of Data Factory Pipeline %q: %+v", name, err) + } + + d.Set("name", resp.Name) + d.Set("resource_group_name", id.ResourceGroup) + 
// dataFactoryPipelineForbiddenChars matches any single character that Azure
// Data Factory disallows in pipeline names, per
// https://docs.microsoft.com/en-us/azure/data-factory/naming-rules
// Compiled once at package scope rather than on every validation call.
var dataFactoryPipelineForbiddenChars = regexp.MustCompile(`[.+?/<>*%&:\\]`)

// validateAzureRMDataFactoryPipelineName rejects pipeline names that contain
// any forbidden character. The previous pattern was anchored
// (`^[.+?/<>*%&:\\]+$`) and therefore only matched names made up ENTIRELY of
// forbidden characters, so a name like "my<pipeline" slipped through
// validation; an unanchored character class catches any occurrence.
func validateAzureRMDataFactoryPipelineName(v interface{}, k string) (warnings []string, errors []error) {
	value := v.(string)
	if dataFactoryPipelineForbiddenChars.MatchString(value) {
		errors = append(errors, fmt.Errorf("any of '.', '+', '?', '/', '<', '>', '*', '%%', '&', ':', '\\', are not allowed in %q: %q", k, value))
	}

	return warnings, errors
}
+package azurerm + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMDataFactoryPipeline_basic(t *testing.T) { + resourceName := "azurerm_data_factory_pipeline.test" + ri := tf.AccRandTimeInt() + location := testLocation() + config := testAccAzureRMDataFactoryPipeline_basic(ri, location) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataFactoryPipelineDestroy, + Steps: []resource.TestStep{ + { + Config: config, + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryPipelineExists(resourceName), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccAzureRMDataFactoryPipeline_update(t *testing.T) { + resourceName := "azurerm_data_factory_pipeline.test" + ri := tf.AccRandTimeInt() + location := testLocation() + config := testAccAzureRMDataFactoryPipeline_update1(ri, location) + config2 := testAccAzureRMDataFactoryPipeline_update2(ri, location) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataFactoryPipelineDestroy, + Steps: []resource.TestStep{ + { + Config: config, + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryPipelineExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "parameters.%", "1"), + resource.TestCheckResourceAttr(resourceName, "annotations.#", "3"), + resource.TestCheckResourceAttr(resourceName, "description", "test description"), + resource.TestCheckResourceAttr(resourceName, "variables.%", "2"), + ), + }, + { + Config: config2, + Check: 
resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryPipelineExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "parameters.%", "2"), + resource.TestCheckResourceAttr(resourceName, "annotations.#", "2"), + resource.TestCheckResourceAttr(resourceName, "description", "test description2"), + resource.TestCheckResourceAttr(resourceName, "variables.%", "3"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testCheckAzureRMDataFactoryPipelineDestroy(s *terraform.State) error { + client := testAccProvider.Meta().(*ArmClient).dataFactoryPipelineClient + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_data_factory_pipeline" { + continue + } + + name := rs.Primary.Attributes["name"] + dataFactoryName := rs.Primary.Attributes["data_factory_name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + + ctx := testAccProvider.Meta().(*ArmClient).StopContext + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + + if err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return err + } + } + + return nil + } + return nil +} + +func testCheckAzureRMDataFactoryPipelineExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + name := rs.Primary.Attributes["name"] + dataFactoryName := rs.Primary.Attributes["data_factory_name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + + client := testAccProvider.Meta().(*ArmClient).dataFactoryPipelineClient + ctx := testAccProvider.Meta().(*ArmClient).StopContext + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Data Factory Pipeline %q (Resource Group %q / Data Factory %q) does not exist", name, 
resourceGroup, dataFactoryName) + } + return fmt.Errorf("Bad: Get on DataFactoryPipelineClient: %+v", err) + } + + return nil + } +} + +func testAccAzureRMDataFactoryPipeline_basic(rInt int, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestrg-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdfv2%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_data_factory_pipeline" "test" { + name = "acctest%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" +} +`, rInt, location, rInt, rInt) +} + +func testAccAzureRMDataFactoryPipeline_update1(rInt int, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestrg-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdfv2%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_data_factory_pipeline" "test" { + name = "acctest%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + annotations = ["test1", "test2", "test3"] + description = "test description" + + parameters = { + test = "testparameter" + } + + variables { + "foo" = "test1" + "bar" = "test2" + } +} +`, rInt, location, rInt, rInt) +} + +func testAccAzureRMDataFactoryPipeline_update2(rInt int, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestrg-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdfv2%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_data_factory_pipeline" "test" { + name 
= "acctest%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + annotations = ["test1", "test2"] + description = "test description2" + + parameters = { + test = "testparameter" + test2 = "testparameter2" + } + + variables { + "foo" = "test1" + "bar" = "test2" + "baz" = "test3" + } +} +`, rInt, location, rInt, rInt) +} diff --git a/website/docs/r/data_factory_pipeline.html.markdown b/website/docs/r/data_factory_pipeline.html.markdown new file mode 100644 index 000000000000..832141075bbf --- /dev/null +++ b/website/docs/r/data_factory_pipeline.html.markdown @@ -0,0 +1,64 @@ +--- +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_factory_pipeline" +sidebar_current: "docs-azurerm-resource-data-factory-pipeline" +description: |- + Manage a Pipeline inside a Azure Data Factory. +--- + +# azurerm_data_factory_pipeline + +Manage a Pipeline inside a Azure Data Factory. + +## Example Usage + +```hcl +resource "azurerm_resource_group" "example" { + name = "example" + location = "northeurope" +} + +resource "azurerm_data_factory" "example" { + name = "example" + location = "${azurerm_resource_group.example.location}" + resource_group_name = "${azurerm_resource_group.example.name}" +} + +resource "azurerm_data_factory_pipeline" "example" { + name = "example" + resource_group_name = "${azurerm_resource_group.example.name}" + data_factory_name = "${azurerm_data_factory.example.name}" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - (Required) Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions. + +* `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Pipeline. 
Changing this forces a new resource to be created.
+
+* `data_factory_name` - (Required) The name of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
+
+* `description` - (Optional) The description for the Data Factory Pipeline.
+
+* `annotations` - (Optional) List of tags that can be used for describing the Data Factory Pipeline.
+
+* `parameters` - (Optional) A map of parameters to associate with the Data Factory Pipeline.
+
+* `variables` - (Optional) A map of variables to associate with the Data Factory Pipeline.
+
+## Attributes Reference
+
+The following attributes are exported:
+
+* `id` - The ID of the Data Factory Pipeline.
+
+## Import
+
+Data Factory Pipeline can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_data_factory_pipeline.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/pipelines/example
+```