diff --git a/azurerm/config.go b/azurerm/config.go
index 2d4926266a9f..7b00a91f1c34 100644
--- a/azurerm/config.go
+++ b/azurerm/config.go
@@ -227,6 +227,7 @@ type ArmClient struct {
 
 	// Data Factory
 	dataFactoryClient              datafactory.FactoriesClient
+	dataFactoryDatasetClient       datafactory.DatasetsClient
 	dataFactoryLinkedServiceClient datafactory.LinkedServicesClient
 
 	// Data Lake Store
@@ -891,6 +892,10 @@ func (c *ArmClient) registerDataFactoryClients(endpoint, subscriptionId string,
 	c.configureClient(&dataFactoryClient.Client, auth)
 	c.dataFactoryClient = dataFactoryClient
 
+	dataFactoryDatasetClient := datafactory.NewDatasetsClientWithBaseURI(endpoint, subscriptionId)
+	c.configureClient(&dataFactoryDatasetClient.Client, auth)
+	c.dataFactoryDatasetClient = dataFactoryDatasetClient
+
 	dataFactoryLinkedServiceClient := datafactory.NewLinkedServicesClientWithBaseURI(endpoint, subscriptionId)
 	c.configureClient(&dataFactoryLinkedServiceClient.Client, auth)
 	c.dataFactoryLinkedServiceClient = dataFactoryLinkedServiceClient
diff --git a/azurerm/data_factory.go b/azurerm/data_factory.go
new file mode 100644
index 000000000000..48db9dbf683c
--- /dev/null
+++ b/azurerm/data_factory.go
@@ -0,0 +1,54 @@
+package azurerm
+
+import (
+	"log"
+
+	"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
+)
+
+func expandDataFactoryParameters(input map[string]interface{}) map[string]*datafactory.ParameterSpecification {
+	output := make(map[string]*datafactory.ParameterSpecification)
+
+	for k, v := range input {
+		output[k] = &datafactory.ParameterSpecification{
+			Type: datafactory.ParameterTypeString,
+			DefaultValue: v.(string),
+		}
+	}
+
+	return output
+}
+
+func flattenDataFactoryParameters(input map[string]*datafactory.ParameterSpecification) map[string]interface{} {
+	output := make(map[string]interface{})
+
+	for k, v := range input {
+		if v != nil {
+			// we only support string parameters at this time
+			val, ok := v.DefaultValue.(string)
+			if !ok {
+				log.Printf("[DEBUG] Skipping parameter %q since it's not a string", k)
+			}
+
+			output[k] = val
+		}
+	}
+
+	return output
+}
+
+func flattenDataFactoryAnnotations(input *[]interface{}) []string {
+	annotations := make([]string, 0)
+	if input == nil {
+		return annotations
+	}
+
+	for _, annotation := range *input {
+		val, ok := annotation.(string)
+		if !ok {
+			log.Printf("[DEBUG] Skipping annotation %q since it's not a string", val)
+		}
+		annotations = append(annotations, val)
+	}
+	return annotations
+}
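Editor's note (illustration only, not part of the patch): a minimal unit-test sketch of the shared helpers above, assuming it lives in the same package as azurerm/data_factory.go. It shows that only string parameter defaults survive the expand/flatten round trip and that a nil annotations pointer flattens to an empty slice; the test names are hypothetical.

package azurerm

import (
	"reflect"
	"testing"
)

func TestDataFactoryParametersRoundTrip(t *testing.T) {
	in := map[string]interface{}{"foo": "test1", "bar": "test2"}

	// expand to the SDK shape, then flatten back to the schema shape
	out := flattenDataFactoryParameters(expandDataFactoryParameters(in))

	if !reflect.DeepEqual(in, out) {
		t.Fatalf("expected %+v, got %+v", in, out)
	}
}

func TestFlattenDataFactoryAnnotations(t *testing.T) {
	// the SDK exposes annotations as *[]interface{}; nil should flatten to an empty slice
	if got := flattenDataFactoryAnnotations(nil); len(got) != 0 {
		t.Fatalf("expected no annotations, got %v", got)
	}

	input := []interface{}{"one", "two"}
	if got := flattenDataFactoryAnnotations(&input); len(got) != 2 || got[0] != "one" {
		t.Fatalf("unexpected annotations: %v", got)
	}
}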
diff --git a/azurerm/provider.go b/azurerm/provider.go
index 906409f26048..7aa0cf949e56 100644
--- a/azurerm/provider.go
+++ b/azurerm/provider.go
@@ -220,6 +220,7 @@ func Provider() terraform.ResourceProvider {
 			"azurerm_container_service": resourceArmContainerService(),
 			"azurerm_cosmosdb_account": resourceArmCosmosDBAccount(),
 			"azurerm_data_factory": resourceArmDataFactory(),
+			"azurerm_data_factory_dataset_sql_server_table": resourceArmDataFactoryDatasetSQLServerTable(),
 			"azurerm_data_factory_linked_service_sql_server": resourceArmDataFactoryLinkedServiceSQLServer(),
 			"azurerm_data_lake_analytics_account": resourceArmDataLakeAnalyticsAccount(),
 			"azurerm_data_lake_analytics_firewall_rule": resourceArmDataLakeAnalyticsFirewallRule(),
diff --git a/azurerm/resource_arm_data_factory_dataset_sql_server_table.go b/azurerm/resource_arm_data_factory_dataset_sql_server_table.go
new file mode 100644
index 000000000000..454e1e7c0fc9
--- /dev/null
+++ b/azurerm/resource_arm_data_factory_dataset_sql_server_table.go
@@ -0,0 +1,369 @@
+package azurerm
+
+import (
+	"fmt"
+	"log"
+	"regexp"
+
+	"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
+	"github.com/hashicorp/terraform/helper/schema"
+	"github.com/hashicorp/terraform/helper/validation"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+)
+
+func resourceArmDataFactoryDatasetSQLServerTable() *schema.Resource {
+	return &schema.Resource{
+		Create: resourceArmDataFactoryDatasetSQLServerTableCreateOrUpdate,
+		Read: resourceArmDataFactoryDatasetSQLServerTableRead,
+		Update: resourceArmDataFactoryDatasetSQLServerTableCreateOrUpdate,
+		Delete: resourceArmDataFactoryDatasetSQLServerTableDelete,
+
+		Importer: &schema.ResourceImporter{
+			State: schema.ImportStatePassthrough,
+		},
+
+		Schema: map[string]*schema.Schema{
+			"name": {
+				Type: schema.TypeString,
+				Required: true,
+				ForceNew: true,
+				ValidateFunc: validateAzureRMDataFactoryLinkedServiceDatasetName,
+			},
+
+			"data_factory_name": {
+				Type: schema.TypeString,
+				Required: true,
+				ForceNew: true,
+				ValidateFunc: validation.StringMatch(
+					regexp.MustCompile(`^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$`),
+					`Invalid name for Data Factory, see https://docs.microsoft.com/en-us/azure/data-factory/naming-rules`,
+				),
+			},
+
+			"resource_group_name": resourceGroupNameSchema(),
+
+			"linked_service_name": {
+				Type: schema.TypeString,
+				Required: true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"table_name": {
+				Type: schema.TypeString,
+				Optional: true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"parameters": {
+				Type: schema.TypeMap,
+				Optional: true,
+			},
+
+			"description": {
+				Type: schema.TypeString,
+				Optional: true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"annotations": {
+				Type: schema.TypeList,
+				Optional: true,
+				Elem: &schema.Schema{
+					Type: schema.TypeString,
+				},
+			},
+
+			"folder": {
+				Type: schema.TypeString,
+				Optional: true,
+				ValidateFunc: validate.NoEmptyStrings,
+			},
+
+			"additional_properties": {
+				Type: schema.TypeMap,
+				Optional: true,
+			},
+
+			"schema_column": {
+				Type: schema.TypeList,
+				Optional: true,
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"name": {
+							Type: schema.TypeString,
+							Required: true,
+							ValidateFunc: validate.NoEmptyStrings,
+						},
+						"type": {
+							Type: schema.TypeString,
+							Optional: true,
+							ValidateFunc: validation.StringInSlice([]string{
+								"Byte",
+								"Byte[]",
+								"Boolean",
+								"Date",
+								"DateTime",
+								"DateTimeOffset",
+								"Decimal",
+								"Double",
+								"Guid",
+								"Int16",
+								"Int32",
+								"Int64",
+								"Single",
+								"String",
+								"TimeSpan",
+							}, false),
+						},
+						"description": {
+							Type: schema.TypeString,
+							Optional: true,
+							ValidateFunc: validate.NoEmptyStrings,
+						},
+					},
+				},
+			},
+		},
+	}
+}
+
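Editor's note (illustration only, not part of the patch): the data_factory_name pattern above only accepts alphanumeric segments separated by single dashes. A hypothetical sketch of how that regexp behaves, assuming it sits in the same package:

package azurerm

import (
	"regexp"
	"testing"
)

func TestDataFactoryNamePattern(t *testing.T) {
	// same pattern as the data_factory_name ValidateFunc above
	re := regexp.MustCompile(`^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$`)

	for _, valid := range []string{"acctestdf1", "my-data-factory"} {
		if !re.MatchString(valid) {
			t.Errorf("expected %q to be accepted", valid)
		}
	}

	for _, invalid := range []string{"-leading", "trailing-", "under_score", "double--dash"} {
		if re.MatchString(invalid) {
			t.Errorf("expected %q to be rejected", invalid)
		}
	}
}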
+func resourceArmDataFactoryDatasetSQLServerTableCreateOrUpdate(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).dataFactoryDatasetClient
+	ctx := meta.(*ArmClient).StopContext
+
+	name := d.Get("name").(string)
+	dataFactoryName := d.Get("data_factory_name").(string)
+	resourceGroup := d.Get("resource_group_name").(string)
+
+	if requireResourcesToBeImported && d.IsNewResource() {
+		existing, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
+		if err != nil {
+			if !utils.ResponseWasNotFound(existing.Response) {
+				return fmt.Errorf("Error checking for presence of existing Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
+			}
+		}
+
+		if existing.ID != nil && *existing.ID != "" {
+			return tf.ImportAsExistsError("azurerm_data_factory_dataset_sql_server_table", *existing.ID)
+		}
+	}
+
+	sqlServerDatasetProperties := datafactory.SQLServerTableDatasetTypeProperties{
+		TableName: d.Get("table_name").(string),
+	}
+
+	linkedServiceName := d.Get("linked_service_name").(string)
+	linkedServiceType := "LinkedServiceReference"
+	linkedService := &datafactory.LinkedServiceReference{
+		ReferenceName: &linkedServiceName,
+		Type: &linkedServiceType,
+	}
+
+	description := d.Get("description").(string)
+	sqlServerTableset := datafactory.SQLServerTableDataset{
+		SQLServerTableDatasetTypeProperties: &sqlServerDatasetProperties,
+		LinkedServiceName: linkedService,
+		Description: &description,
+	}
+
+	if v, ok := d.GetOk("folder"); ok {
+		name := v.(string)
+		sqlServerTableset.Folder = &datafactory.DatasetFolder{
+			Name: &name,
+		}
+	}
+
+	if v, ok := d.GetOk("parameters"); ok {
+		sqlServerTableset.Parameters = expandDataFactoryParameters(v.(map[string]interface{}))
+	}
+
+	if v, ok := d.GetOk("annotations"); ok {
+		annotations := v.([]interface{})
+		sqlServerTableset.Annotations = &annotations
+	}
+
+	if v, ok := d.GetOk("additional_properties"); ok {
+		sqlServerTableset.AdditionalProperties = v.(map[string]interface{})
+	}
+
+	if v, ok := d.GetOk("schema_column"); ok {
+		sqlServerTableset.Structure = expandDataFactoryDatasetStructure(v.([]interface{}))
+	}
+
+	datasetType := string(datafactory.TypeSQLServerTable)
+	dataset := datafactory.DatasetResource{
+		Properties: &sqlServerTableset,
+		Type: &datasetType,
+	}
+
+	if _, err := client.CreateOrUpdate(ctx, resourceGroup, dataFactoryName, name, dataset, ""); err != nil {
+		return fmt.Errorf("Error creating/updating Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
+	}
+
+	resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
+	if err != nil {
+		return fmt.Errorf("Error retrieving Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
+	}
+
+	if resp.ID == nil {
+		return fmt.Errorf("Cannot read Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
+	}
+
+	d.SetId(*resp.ID)
+
+	return resourceArmDataFactoryDatasetSQLServerTableRead(d, meta)
+}
+
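Editor's note (illustration only, not part of the patch): the create function stores the ARM ID returned by the API, and Read/Delete recover the factory and dataset names from its path segments. A hypothetical sketch of that contract, assuming the usual Microsoft.DataFactory factories/{name}/datasets/{name} ID layout and the provider's existing parseAzureResourceID helper; the subscription and names below are made up:

package azurerm

import "testing"

func TestParseDataFactoryDatasetID(t *testing.T) {
	// hypothetical ID following the assumed factories/{name}/datasets/{name} layout
	id := "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-rg" +
		"/providers/Microsoft.DataFactory/factories/example-df/datasets/example-ds"

	parsed, err := parseAzureResourceID(id)
	if err != nil {
		t.Fatal(err)
	}

	if parsed.ResourceGroup != "example-rg" {
		t.Errorf("expected resource group %q, got %q", "example-rg", parsed.ResourceGroup)
	}
	if parsed.Path["factories"] != "example-df" || parsed.Path["datasets"] != "example-ds" {
		t.Errorf("unexpected path segments: %+v", parsed.Path)
	}
}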
+func resourceArmDataFactoryDatasetSQLServerTableRead(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).dataFactoryDatasetClient
+	ctx := meta.(*ArmClient).StopContext
+
+	id, err := parseAzureResourceID(d.Id())
+	if err != nil {
+		return err
+	}
+	resourceGroup := id.ResourceGroup
+	dataFactoryName := id.Path["factories"]
+	name := id.Path["datasets"]
+
+	resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
+	if err != nil {
+		if utils.ResponseWasNotFound(resp.Response) {
+			d.SetId("")
+			return nil
+		}
+
+		return fmt.Errorf("Error retrieving Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
+	}
+
+	d.Set("name", resp.Name)
+	d.Set("resource_group_name", resourceGroup)
+	d.Set("data_factory_name", dataFactoryName)
+
+	sqlServerTable, ok := resp.Properties.AsSQLServerTableDataset()
+	if !ok {
+		return fmt.Errorf("Error classifying Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeSQLServerTable, *resp.Type)
+	}
+
+	d.Set("additional_properties", sqlServerTable.AdditionalProperties)
+
+	if sqlServerTable.Description != nil {
+		d.Set("description", sqlServerTable.Description)
+	}
+
+	parameters := flattenDataFactoryParameters(sqlServerTable.Parameters)
+	if err := d.Set("parameters", parameters); err != nil {
+		return fmt.Errorf("Error setting `parameters`: %+v", err)
+	}
+
+	annotations := flattenDataFactoryAnnotations(sqlServerTable.Annotations)
+	if err := d.Set("annotations", annotations); err != nil {
+		return fmt.Errorf("Error setting `annotations`: %+v", err)
+	}
+
+	if linkedService := sqlServerTable.LinkedServiceName; linkedService != nil {
+		if linkedService.ReferenceName != nil {
+			d.Set("linked_service_name", linkedService.ReferenceName)
+		}
+	}
+
+	if properties := sqlServerTable.SQLServerTableDatasetTypeProperties; properties != nil {
+		val, ok := properties.TableName.(string)
+		if !ok {
+			log.Printf("[DEBUG] Skipping `table_name` since it's not a string")
+		} else {
+			d.Set("table_name", val)
+		}
+	}
+
+	if folder := sqlServerTable.Folder; folder != nil {
+		if folder.Name != nil {
+			d.Set("folder", folder.Name)
+		}
+	}
+
+	structureColumns := flattenDataFactoryStructureColumns(sqlServerTable.Structure)
+	if err := d.Set("schema_column", structureColumns); err != nil {
+		return fmt.Errorf("Error setting `schema_column`: %+v", err)
+	}
+
+	return nil
+}
+
+func resourceArmDataFactoryDatasetSQLServerTableDelete(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).dataFactoryDatasetClient
+	ctx := meta.(*ArmClient).StopContext
+
+	id, err := parseAzureResourceID(d.Id())
+	if err != nil {
+		return err
+	}
+	resourceGroup := id.ResourceGroup
+	dataFactoryName := id.Path["factories"]
+	name := id.Path["datasets"]
+
+	response, err := client.Delete(ctx, resourceGroup, dataFactoryName, name)
+	if err != nil {
+		if !utils.ResponseWasNotFound(response) {
+			return fmt.Errorf("Error deleting Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
+		}
+	}
+
+	return nil
+}
+
+// DatasetColumn describes the attributes needed to specify a structure column for a dataset
+type DatasetColumn struct {
+	Name string `json:"name,omitempty"`
+	Description string `json:"description,omitempty"`
+	Type string `json:"type,omitempty"`
+}
+
+func expandDataFactoryDatasetStructure(input []interface{}) interface{} {
+	columns := make([]DatasetColumn, 0)
+	for _, column := range input {
+		attrs := column.(map[string]interface{})
+
+		datasetColumn := DatasetColumn{
+			Name: attrs["name"].(string),
+		}
+		if attrs["description"] != nil {
+			datasetColumn.Description = attrs["description"].(string)
+		}
+		if attrs["type"] != nil {
+			datasetColumn.Type = attrs["type"].(string)
+		}
+		columns = append(columns, datasetColumn)
+	}
+	return columns
+}
+
+func flattenDataFactoryStructureColumns(input interface{}) []interface{} {
+	output := make([]interface{}, 0)
+
+	columns, ok := input.([]interface{})
+	if !ok {
+		return columns
+	}
+
+	for _, v := range columns {
+		column, ok := v.(map[string]interface{})
+		if !ok {
+			continue
+		}
+		result := make(map[string]interface{})
+		if column["name"] != nil {
+			result["name"] = column["name"]
+		}
+		if column["type"] != nil {
+			result["type"] = column["type"]
+		}
+		if column["description"] != nil {
+			result["description"] = column["description"]
+		}
+		output = append(output, result)
+	}
+	return output
+}
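Editor's note (illustration only, not part of the patch): the Structure property comes back from the SDK as untyped JSON, which is why flattenDataFactoryStructureColumns works on interface{} values rather than DatasetColumn. A hypothetical sketch of the expected shape, assuming it sits in the same package:

package azurerm

import "testing"

func TestFlattenDataFactoryStructureColumns(t *testing.T) {
	// the API returns Structure as []interface{} of map[string]interface{}
	input := []interface{}{
		map[string]interface{}{"name": "id", "type": "Int32", "description": "primary key"},
		map[string]interface{}{"name": "label", "type": "String"},
	}

	flattened := flattenDataFactoryStructureColumns(input)
	if len(flattened) != 2 {
		t.Fatalf("expected 2 schema_column entries, got %d", len(flattened))
	}

	first := flattened[0].(map[string]interface{})
	if first["name"] != "id" || first["type"] != "Int32" {
		t.Errorf("unexpected first column: %+v", first)
	}
}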
diff --git a/azurerm/resource_arm_data_factory_dataset_sql_server_table_test.go b/azurerm/resource_arm_data_factory_dataset_sql_server_table_test.go
new file mode 100644
index 000000000000..30107bfa3668
--- /dev/null
+++ b/azurerm/resource_arm_data_factory_dataset_sql_server_table_test.go
@@ -0,0 +1,273 @@
+package azurerm
+
+import (
+	"fmt"
+	"net/http"
+	"testing"
+
+	"github.com/hashicorp/terraform/helper/resource"
+	"github.com/hashicorp/terraform/terraform"
+	"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
+
+	"github.com/hashicorp/terraform/helper/acctest"
+)
+
+func TestAccAzureRMDataFactoryDatasetSQLServerTable_basic(t *testing.T) {
+	ri := acctest.RandInt()
+	config := testAccAzureRMDataFactoryDatasetSQLServerTable_basic(ri, testLocation())
+	resourceName := "azurerm_data_factory_dataset_sql_server_table.test"
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck: func() { testAccPreCheck(t) },
+		Providers: testAccProviders,
+		CheckDestroy: testCheckAzureRMDataFactoryDatasetSQLServerTableDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: config,
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMDataFactoryDatasetSQLServerTableExists(resourceName),
+				),
+			},
+			{
+				ResourceName: resourceName,
+				ImportState: true,
+				ImportStateVerify: true,
+			},
+		},
+	})
+}
+
+func TestAccAzureRMDataFactoryDatasetSQLServerTable_update(t *testing.T) {
+	ri := acctest.RandInt()
+	config := testAccAzureRMDataFactoryDatasetSQLServerTable_update1(ri, testLocation())
+	config2 := testAccAzureRMDataFactoryDatasetSQLServerTable_update2(ri, testLocation())
+	resourceName := "azurerm_data_factory_dataset_sql_server_table.test"
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck: func() { testAccPreCheck(t) },
+		Providers: testAccProviders,
+		CheckDestroy: testCheckAzureRMDataFactoryDatasetSQLServerTableDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: config,
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMDataFactoryDatasetSQLServerTableExists(resourceName),
+					resource.TestCheckResourceAttr(resourceName, "parameters.%", "2"),
+					resource.TestCheckResourceAttr(resourceName, "annotations.#", "3"),
+					resource.TestCheckResourceAttr(resourceName, "schema_column.#", "1"),
+					resource.TestCheckResourceAttr(resourceName, "additional_properties.%", "2"),
+					resource.TestCheckResourceAttr(resourceName, "description", "test description"),
+				),
+			},
+			{
+				Config: config2,
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMDataFactoryDatasetSQLServerTableExists(resourceName),
+					resource.TestCheckResourceAttr(resourceName, "parameters.%", "3"),
+					resource.TestCheckResourceAttr(resourceName, "annotations.#", "2"),
+					resource.TestCheckResourceAttr(resourceName, "schema_column.#", "2"),
+					resource.TestCheckResourceAttr(resourceName, "additional_properties.%", "1"),
+					resource.TestCheckResourceAttr(resourceName, "description", "test description 2"),
+				),
+			},
+			{
+				ResourceName: resourceName,
+				ImportState: true,
+				ImportStateVerify: true,
+			},
+		},
+	})
+}
+
+func testCheckAzureRMDataFactoryDatasetSQLServerTableExists(name string) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+		// Ensure we have enough information in state to look up in API
+		rs, ok := s.RootModule().Resources[name]
+		if !ok {
+			return fmt.Errorf("Not found: %s", name)
+		}
+
+		name := rs.Primary.Attributes["name"]
+		resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"]
+		dataFactoryName := rs.Primary.Attributes["data_factory_name"]
+		if !hasResourceGroup {
+			return fmt.Errorf("Bad: no resource group found in state for Data Factory: %s", name)
+		}
+
+		client := testAccProvider.Meta().(*ArmClient).dataFactoryDatasetClient
+		ctx := testAccProvider.Meta().(*ArmClient).StopContext
+
+		resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
+		if err != nil {
+			return fmt.Errorf("Bad: Get on dataFactoryDatasetClient: %+v", err)
+		}
+
+		if utils.ResponseWasNotFound(resp.Response) {
+			return fmt.Errorf("Bad: Data Factory Dataset SQL Server Table %q (data factory name: %q / resource group: %q) does not exist", name, dataFactoryName, resourceGroup)
+		}
+
+		return nil
+	}
+}
+
+func testCheckAzureRMDataFactoryDatasetSQLServerTableDestroy(s *terraform.State) error {
+	client := testAccProvider.Meta().(*ArmClient).dataFactoryDatasetClient
+	ctx := testAccProvider.Meta().(*ArmClient).StopContext
+
+	for _, rs := range s.RootModule().Resources {
+		if rs.Type != "azurerm_data_factory_dataset_sql_server_table" {
+			continue
+		}
+
+		name := rs.Primary.Attributes["name"]
+		resourceGroup := rs.Primary.Attributes["resource_group_name"]
+		dataFactoryName := rs.Primary.Attributes["data_factory_name"]
+
+		resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
+
+		if err != nil {
+			return nil
+		}
+
+		if resp.StatusCode != http.StatusNotFound {
+			return fmt.Errorf("Data Factory Dataset SQL Server Table still exists:\n%#v", resp.Properties)
+		}
+	}
+
+	return nil
+}
+
+func testAccAzureRMDataFactoryDatasetSQLServerTable_basic(rInt int, location string) string {
+	return fmt.Sprintf(`
+resource "azurerm_resource_group" "test" {
+  name     = "acctestrg-%d"
+  location = "%s"
+}
+
+resource "azurerm_data_factory" "test" {
+  name                = "acctestdf%d"
+  location            = "${azurerm_resource_group.test.location}"
+  resource_group_name = "${azurerm_resource_group.test.name}"
+}
+
+resource "azurerm_data_factory_linked_service_sql_server" "test" {
+  name                = "acctestlssql%d"
+  resource_group_name = "${azurerm_resource_group.test.name}"
+  data_factory_name   = "${azurerm_data_factory.test.name}"
+  connection_string   = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test"
+}
+
+resource "azurerm_data_factory_dataset_sql_server_table" "test" {
+  name                = "acctestds%d"
+  resource_group_name = "${azurerm_resource_group.test.name}"
+  data_factory_name   = "${azurerm_data_factory.test.name}"
+  linked_service_name = "${azurerm_data_factory_linked_service_sql_server.test.name}"
+}
+`, rInt, location, rInt, rInt, rInt)
+}
+
"${azurerm_data_factory_linked_service_sql_server.test.name}" + + description = "test description" + annotations = ["test1", "test2", "test3"] + table_name = "testTable" + folder = "testFolder" + + parameters { + "foo" = "test1" + "bar" = "test2" + } + + additional_properties { + "foo" = "test1" + "bar" = "test2" + } + + schema_column { + name = "test1" + type = "Byte" + description = "description" + } +} +`, rInt, location, rInt, rInt, rInt) +} + +func testAccAzureRMDataFactoryDatasetSQLServerTable_update2(rInt int, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestrg-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_data_factory_linked_service_sql_server" "test" { + name = "acctestlssql%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + connection_string = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test" +} + +resource "azurerm_data_factory_dataset_sql_server_table" "test" { + name = "acctestds%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + linked_service_name = "${azurerm_data_factory_linked_service_sql_server.test.name}" + + description = "test description 2" + annotations = ["test1", "test2"] + table_name = "testTable" + folder = "testFolder" + + parameters { + "foo" = "test1" + "bar" = "test2" + "buzz" = "test3" + } + + additional_properties { + "foo" = "test1" + } + + schema_column { + name = "test1" + type = "Byte" + description = "description" + } + + schema_column { + name = "test2" + type = "Byte" + description = "description" + } +} +`, rInt, location, rInt, rInt, rInt) +} diff --git a/azurerm/resource_arm_data_factory_linked_service_sql_server.go b/azurerm/resource_arm_data_factory_linked_service_sql_server.go index 157fe98e2bd9..145deade7b21 100644 --- a/azurerm/resource_arm_data_factory_linked_service_sql_server.go +++ b/azurerm/resource_arm_data_factory_linked_service_sql_server.go @@ -31,7 +31,7 @@ func resourceArmDataFactoryLinkedServiceSQLServer() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validateAzureRMDataFactoryLinkedServiceName, + ValidateFunc: validateAzureRMDataFactoryLinkedServiceDatasetName, }, "data_factory_name": { @@ -120,7 +120,7 @@ func resourceArmDataFactoryLinkedServiceSQLServerCreateOrUpdate(d *schema.Resour } if v, ok := d.GetOk("parameters"); ok { - sqlServerLinkedService.Parameters = expandDataFactoryLinkedServiceParameters(v.(map[string]interface{})) + sqlServerLinkedService.Parameters = expandDataFactoryParameters(v.(map[string]interface{})) } if v, ok := d.GetOk("integration_runtime_name"); ok { @@ -128,11 +128,12 @@ func resourceArmDataFactoryLinkedServiceSQLServerCreateOrUpdate(d *schema.Resour } if v, ok := d.GetOk("additional_properties"); ok { - sqlServerLinkedService.AdditionalProperties = expandDataFactoryLinkedServiceAdditionalProperties(v.(map[string]interface{})) + sqlServerLinkedService.AdditionalProperties = v.(map[string]interface{}) } if v, ok := d.GetOk("annotations"); ok { - sqlServerLinkedService.Annotations = expandDataFactoryLinkedServiceAnnotations(v.([]interface{})) + annotations := v.([]interface{}) + sqlServerLinkedService.Annotations = 
diff --git a/azurerm/resource_arm_data_factory_linked_service_sql_server.go b/azurerm/resource_arm_data_factory_linked_service_sql_server.go
index 157fe98e2bd9..145deade7b21 100644
--- a/azurerm/resource_arm_data_factory_linked_service_sql_server.go
+++ b/azurerm/resource_arm_data_factory_linked_service_sql_server.go
@@ -31,7 +31,7 @@ func resourceArmDataFactoryLinkedServiceSQLServer() *schema.Resource {
 				Type: schema.TypeString,
 				Required: true,
 				ForceNew: true,
-				ValidateFunc: validateAzureRMDataFactoryLinkedServiceName,
+				ValidateFunc: validateAzureRMDataFactoryLinkedServiceDatasetName,
 			},
 
 			"data_factory_name": {
@@ -120,7 +120,7 @@ func resourceArmDataFactoryLinkedServiceSQLServerCreateOrUpdate(d *schema.Resour
 	}
 
 	if v, ok := d.GetOk("parameters"); ok {
-		sqlServerLinkedService.Parameters = expandDataFactoryLinkedServiceParameters(v.(map[string]interface{}))
+		sqlServerLinkedService.Parameters = expandDataFactoryParameters(v.(map[string]interface{}))
 	}
 
 	if v, ok := d.GetOk("integration_runtime_name"); ok {
@@ -128,11 +128,12 @@ func resourceArmDataFactoryLinkedServiceSQLServerCreateOrUpdate(d *schema.Resour
 	}
 
 	if v, ok := d.GetOk("additional_properties"); ok {
-		sqlServerLinkedService.AdditionalProperties = expandDataFactoryLinkedServiceAdditionalProperties(v.(map[string]interface{}))
+		sqlServerLinkedService.AdditionalProperties = v.(map[string]interface{})
 	}
 
 	if v, ok := d.GetOk("annotations"); ok {
-		sqlServerLinkedService.Annotations = expandDataFactoryLinkedServiceAnnotations(v.([]interface{}))
+		annotations := v.([]interface{})
+		sqlServerLinkedService.Annotations = &annotations
 	}
 
 	linkedService := datafactory.LinkedServiceResource{
@@ -194,12 +195,12 @@ func resourceArmDataFactoryLinkedServiceSQLServerRead(d *schema.ResourceData, me
 		d.Set("description", *sqlServer.Description)
 	}
 
-	annotations := flattenDataFactoryLinkedServiceAnnotations(sqlServer.Annotations)
+	annotations := flattenDataFactoryAnnotations(sqlServer.Annotations)
 	if err := d.Set("annotations", annotations); err != nil {
 		return fmt.Errorf("Error setting `annotations`: %+v", err)
 	}
 
-	parameters := flattenDataFactoryLinkedServiceParameters(sqlServer.Parameters)
+	parameters := flattenDataFactoryParameters(sqlServer.Parameters)
 	if err := d.Set("parameters", parameters); err != nil {
 		return fmt.Errorf("Error setting `parameters`: %+v", err)
 	}
@@ -278,7 +279,7 @@ func azureRmDataFactoryLinkedServiceConnectionStringDiff(k, old string, new stri
 	return true
 }
 
-func validateAzureRMDataFactoryLinkedServiceName(v interface{}, k string) (warnings []string, errors []error) {
+func validateAzureRMDataFactoryLinkedServiceDatasetName(v interface{}, k string) (warnings []string, errors []error) {
 	value := v.(string)
 	if regexp.MustCompile(`^[-.+?/<>*%&:\\]+$`).MatchString(value) {
 		errors = append(errors, fmt.Errorf("any of '-' '.', '+', '?', '/', '<', '>', '*', '%%', '&', ':', '\\', are not allowed in %q: %q", k, value))
@@ -295,70 +296,3 @@ func expandDataFactoryLinkedServiceIntegrationRuntime(integrationRuntimeName str
 		Type: &typeString,
 	}
 }
-
-func expandDataFactoryLinkedServiceParameters(input map[string]interface{}) map[string]*datafactory.ParameterSpecification {
-	output := make(map[string]*datafactory.ParameterSpecification)
-
-	for k, v := range input {
-		output[k] = &datafactory.ParameterSpecification{
-			Type: datafactory.ParameterTypeString,
-			DefaultValue: v.(string),
-		}
-	}
-
-	return output
-}
-
-func expandDataFactoryLinkedServiceAdditionalProperties(input map[string]interface{}) map[string]interface{} {
-	output := make(map[string]interface{})
-
-	for k, v := range input {
-		output[k] = v
-	}
-
-	return output
-}
-
-func flattenDataFactoryLinkedServiceParameters(input map[string]*datafactory.ParameterSpecification) map[string]interface{} {
-	output := make(map[string]interface{})
-
-	for k, v := range input {
-		if v != nil {
-			// we only support string parameters at this time
-			val, ok := v.DefaultValue.(string)
-			if !ok {
-				log.Printf("[DEBUG] Skipping parameter %q since it's not a string", k)
-			}
-
-			output[k] = val
-		}
-	}
-
-	return output
-}
-
-func expandDataFactoryLinkedServiceAnnotations(input []interface{}) *[]interface{} {
-	annotations := make([]interface{}, 0)
-
-	for _, annotation := range input {
-		annotations = append(annotations, annotation.(string))
-	}
-
-	return &annotations
-}
-
-func flattenDataFactoryLinkedServiceAnnotations(input *[]interface{}) []string {
-	annotations := make([]string, 0)
-	if input == nil {
-		return annotations
-	}
-
-	for _, annotation := range *input {
-		val, ok := annotation.(string)
-		if !ok {
-			log.Printf("[DEBUG] Skipping annotation %q since it's not a string", val)
-		}
-		annotations = append(annotations, val)
-	}
-	return annotations
-}
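Editor's note (illustration only, not part of the patch): the renamed validator is now shared by the linked service and dataset resources. As written, its regexp only rejects names made up entirely of the listed special characters. A hypothetical sketch, assuming it sits in the same package:

package azurerm

import "testing"

func TestValidateAzureRMDataFactoryLinkedServiceDatasetName(t *testing.T) {
	// a plain alphanumeric name produces no validation errors
	if _, errs := validateAzureRMDataFactoryLinkedServiceDatasetName("acctestds", "name"); len(errs) > 0 {
		t.Errorf("expected %q to be valid, got %v", "acctestds", errs)
	}

	// a value consisting solely of the disallowed characters is rejected
	if _, errs := validateAzureRMDataFactoryLinkedServiceDatasetName("<>*%&:\\", "name"); len(errs) == 0 {
		t.Errorf("expected %q to be rejected", "<>*%&:\\")
	}
}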
diff --git a/azurerm/resource_arm_data_factory_linked_service_sql_server_test.go b/azurerm/resource_arm_data_factory_linked_service_sql_server_test.go
index 1248b89de068..a2d7503aba00 100644
--- a/azurerm/resource_arm_data_factory_linked_service_sql_server_test.go
+++ b/azurerm/resource_arm_data_factory_linked_service_sql_server_test.go
@@ -65,10 +65,10 @@ func TestAccAzureRMDataFactoryLinkedServiceSQLServer_basic(t *testing.T) {
 				Check: resource.ComposeTestCheckFunc(
 					testCheckAzureRMDataFactoryLinkedServiceSQLServerExists(resourceName),
 					resource.TestCheckResourceAttr(resourceName, "parameters.%", "2"),
-					resource.TestCheckResourceAttrSet(resourceName, "connection_string"),
 					resource.TestCheckResourceAttr(resourceName, "annotations.#", "3"),
 					resource.TestCheckResourceAttr(resourceName, "additional_properties.%", "2"),
 					resource.TestCheckResourceAttr(resourceName, "description", "test description"),
+					resource.TestCheckResourceAttrSet(resourceName, "connection_string"),
 				),
 			},
 			{
@@ -76,10 +76,10 @@ func TestAccAzureRMDataFactoryLinkedServiceSQLServer_basic(t *testing.T) {
 				Check: resource.ComposeTestCheckFunc(
 					testCheckAzureRMDataFactoryLinkedServiceSQLServerExists(resourceName),
 					resource.TestCheckResourceAttr(resourceName, "parameters.%", "3"),
-					resource.TestCheckResourceAttrSet(resourceName, "connection_string"),
 					resource.TestCheckResourceAttr(resourceName, "annotations.#", "2"),
 					resource.TestCheckResourceAttr(resourceName, "additional_properties.%", "1"),
 					resource.TestCheckResourceAttr(resourceName, "description", "test description 2"),
+					resource.TestCheckResourceAttrSet(resourceName, "connection_string"),
 				),
 			},
 			{
diff --git a/website/azurerm.erb b/website/azurerm.erb
index ce9b1e1946b2..2764e1064d28 100644
--- a/website/azurerm.erb
+++ b/website/azurerm.erb
@@ -728,6 +728,9 @@