azurerm_data_factory_dataset_delimited_text - supports the `azure_blob_fs_location` property (#12041)
njuCZ authored Jun 10, 2021
1 parent 8bcb99e commit 0dd2a1b
Showing 4 changed files with 210 additions and 26 deletions.
56 changes: 56 additions & 0 deletions azurerm/internal/services/datafactory/data_factory.go
@@ -279,6 +279,10 @@ func expandDataFactoryDatasetLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation {
return expandDataFactoryDatasetAzureBlobStorageLocation(d)
}

if _, ok := d.GetOk("azure_blob_fs_location"); ok {
return expandDataFactoryDatasetAzureBlobFSLocation(d)
}

return nil
}

@@ -310,6 +314,27 @@ func expandDataFactoryDatasetAzureBlobStorageLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation {
return blobStorageLocation
}

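// expandDataFactoryDatasetAzureBlobFSLocation maps the `azure_blob_fs_location`
// schema block onto the SDK's AzureBlobFSLocation; `path` and `filename` are
// optional and are only set when non-empty.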
func expandDataFactoryDatasetAzureBlobFSLocation(d *pluginsdk.ResourceData) datafactory.BasicDatasetLocation {
azureBlobFsLocations := d.Get("azure_blob_fs_location").([]interface{})
if len(azureBlobFsLocations) == 0 || azureBlobFsLocations[0] == nil {
return nil
}
props := azureBlobFsLocations[0].(map[string]interface{})

blobFSLocation := datafactory.AzureBlobFSLocation{
FileSystem: props["file_system"].(string),
Type: datafactory.TypeBasicDatasetLocationTypeAzureBlobFSLocation,
}
if path := props["path"].(string); len(path) > 0 {
blobFSLocation.FolderPath = path
}
if filename := props["filename"].(string); len(filename) > 0 {
blobFSLocation.FileName = filename
}

return blobFSLocation
}

func flattenDataFactoryDatasetHTTPServerLocation(input *datafactory.HTTPServerLocation) []interface{} {
if input == nil {
return nil
@@ -347,3 +372,34 @@ func flattenDataFactoryDatasetAzureBlobStorageLocation(input *datafactory.AzureBlobStorageLocation) []interface{} {

return []interface{}{result}
}

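// flattenDataFactoryDatasetAzureBlobFSLocation converts an SDK AzureBlobFSLocation
// back into the single-element `azure_blob_fs_location` block, defaulting absent
// fields to empty strings.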
func flattenDataFactoryDatasetAzureBlobFSLocation(input *datafactory.AzureBlobFSLocation) []interface{} {
if input == nil {
return []interface{}{}
}

fileSystem, path, fileName := "", "", ""
if input.FileSystem != nil {
if v, ok := input.FileSystem.(string); ok {
fileSystem = v
}
}
if input.FolderPath != nil {
if v, ok := input.FolderPath.(string); ok {
path = v
}
}
if input.FileName != nil {
if v, ok := input.FileName.(string); ok {
fileName = v
}
}

return []interface{}{
map[string]interface{}{
"file_system": fileSystem,
"path": path,
"filename": fileName,
},
}
}
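Taken together, these two helpers round-trip the new `azure_blob_fs_location` block between the Terraform schema and the SDK's AzureBlobFSLocation type. A minimal sketch of the configuration they serve (resource names here are hypothetical; only `file_system` is required by the schema):

resource "azurerm_data_factory_dataset_delimited_text" "example" {
  name                = "example"
  resource_group_name = azurerm_resource_group.example.name
  data_factory_name   = azurerm_data_factory.example.name
  linked_service_name = azurerm_data_factory_linked_service_data_lake_storage_gen2.example.name

  azure_blob_fs_location {
    file_system = "examplefs"   # required; maps to FileSystem
    path        = "foo/bar"     # optional; maps to FolderPath
    filename    = "example.csv" # optional; maps to FileName
  }

  column_delimiter    = ","
  first_row_as_header = true
}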
azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go
@@ -61,11 +61,10 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource {

// Delimited Text Specific Field, one option for 'location'
"http_server_location": {
- Type: pluginsdk.TypeList,
- MaxItems: 1,
- Optional: true,
- // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"},
- ConflictsWith: []string{"azure_blob_storage_location"},
+ Type: pluginsdk.TypeList,
+ MaxItems: 1,
+ Optional: true,
+ ExactlyOneOf: []string{"http_server_location", "azure_blob_storage_location", "azure_blob_fs_location"},
Elem: &pluginsdk.Resource{
Schema: map[string]*pluginsdk.Schema{
"relative_url": {
@@ -89,11 +88,10 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource {

// Delimited Text Specific Field, one option for 'location'
"azure_blob_storage_location": {
- Type: pluginsdk.TypeList,
- MaxItems: 1,
- Optional: true,
- // ConflictsWith: []string{"sftp_server_location", "file_server_location", "s3_location", "azure_blob_storage_location"},
- ConflictsWith: []string{"http_server_location"},
+ Type: pluginsdk.TypeList,
+ MaxItems: 1,
+ Optional: true,
+ ExactlyOneOf: []string{"http_server_location", "azure_blob_storage_location", "azure_blob_fs_location"},
Elem: &pluginsdk.Resource{
Schema: map[string]*pluginsdk.Schema{
"container": {
@@ -115,6 +113,32 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource {
},
},

"azure_blob_fs_location": {
Type: pluginsdk.TypeList,
MaxItems: 1,
Optional: true,
ExactlyOneOf: []string{"http_server_location", "azure_blob_storage_location", "azure_blob_fs_location"},
Elem: &pluginsdk.Resource{
Schema: map[string]*pluginsdk.Schema{
"file_system": {
Type: pluginsdk.TypeString,
Required: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"path": {
Type: pluginsdk.TypeString,
Optional: true,
ValidateFunc: validation.StringIsNotEmpty,
},
"filename": {
Type: pluginsdk.TypeString,
Optional: true,
ValidateFunc: validation.StringIsNotEmpty,
},
},
},
},

// Delimited Text Specific Field
"column_delimiter": {
Type: pluginsdk.TypeString,
@@ -421,14 +445,18 @@ func resourceDataFactoryDatasetDelimitedTextRead(d *pluginsdk.ResourceData, meta interface{}) error {
}

if properties := delimited_textTable.DelimitedTextDatasetTypeProperties; properties != nil {
- if httpServerLocation, ok := properties.Location.AsHTTPServerLocation(); ok {
- if err := d.Set("http_server_location", flattenDataFactoryDatasetHTTPServerLocation(httpServerLocation)); err != nil {
- return fmt.Errorf("Error setting `http_server_location` for Data Factory Delimited Text Dataset %s", err)
- }
- }
- if azureBlobStorageLocation, ok := properties.Location.AsAzureBlobStorageLocation(); ok {
- if err := d.Set("azure_blob_storage_location", flattenDataFactoryDatasetAzureBlobStorageLocation(azureBlobStorageLocation)); err != nil {
- return fmt.Errorf("Error setting `azure_blob_storage_location` for Data Factory Delimited Text Dataset %s", err)
- }
- }
+ switch location := properties.Location.(type) {
+ case datafactory.HTTPServerLocation:
+ if err := d.Set("http_server_location", flattenDataFactoryDatasetHTTPServerLocation(&location)); err != nil {
+ return fmt.Errorf("setting `http_server_location` for Data Factory Delimited Text Dataset %s", err)
+ }
+ case datafactory.AzureBlobStorageLocation:
+ if err := d.Set("azure_blob_storage_location", flattenDataFactoryDatasetAzureBlobStorageLocation(&location)); err != nil {
+ return fmt.Errorf("setting `azure_blob_storage_location` for Data Factory Delimited Text Dataset %s", err)
+ }
+ case datafactory.AzureBlobFSLocation:
+ if err := d.Set("azure_blob_fs_location", flattenDataFactoryDatasetAzureBlobFSLocation(&location)); err != nil {
+ return fmt.Errorf("setting `azure_blob_fs_location` for Data Factory Delimited Text Dataset %s", err)
+ }
+ }
}

azurerm/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go
@@ -82,6 +82,21 @@ func TestAccDataFactoryDatasetDelimitedText_blob(t *testing.T) {
})
}

func TestAccDataFactoryDatasetDelimitedText_blobFS(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_delimited_text", "test")
r := DatasetDelimitedTextResource{}

data.ResourceTest(t, r, []acceptance.TestStep{
{
Config: r.blobFS(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
})
}

func (t DatasetDelimitedTextResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
id, err := azure.ParseAzureResourceID(state.ID)
if err != nil {
@@ -362,3 +377,77 @@ resource "azurerm_data_factory_dataset_delimited_text" "test" {
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}

func (DatasetDelimitedTextResource) blobFS(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
features {}
}
resource "azurerm_resource_group" "test" {
name = "acctestRG-df-%d"
location = "%s"
}
resource "azurerm_storage_account" "test" {
name = "acctestsa%s"
resource_group_name = azurerm_resource_group.test.name
location = azurerm_resource_group.test.location
account_kind = "BlobStorage"
account_tier = "Standard"
account_replication_type = "LRS"
is_hns_enabled = true
allow_blob_public_access = true
}
resource "azurerm_storage_data_lake_gen2_filesystem" "test" {
name = "acctest-datalake-%d"
storage_account_id = azurerm_storage_account.test.id
}
resource "azurerm_data_factory" "test" {
name = "acctestdf%d"
location = azurerm_resource_group.test.location
resource_group_name = azurerm_resource_group.test.name
identity {
type = "SystemAssigned"
}
}
resource "azurerm_role_assignment" "test" {
scope = azurerm_storage_account.test.id
role_definition_name = "Storage Blob Data Owner"
principal_id = azurerm_data_factory.test.identity.0.principal_id
}
resource "azurerm_data_factory_linked_service_data_lake_storage_gen2" "test" {
name = "acctestDataLakeStorage%d"
resource_group_name = azurerm_resource_group.test.name
data_factory_name = azurerm_data_factory.test.name
use_managed_identity = true
url = azurerm_storage_account.test.primary_dfs_endpoint
}
resource "azurerm_data_factory_dataset_delimited_text" "test" {
name = "acctestds%d"
resource_group_name = azurerm_resource_group.test.name
data_factory_name = azurerm_data_factory.test.name
linked_service_name = azurerm_data_factory_linked_service_data_lake_storage_gen2.test.name
azure_blob_fs_location {
file_system = azurerm_storage_data_lake_gen2_filesystem.test.name
path = "foo/bar/"
filename = "a.csv"
}
column_delimiter = ","
row_delimiter = "NEW"
encoding = "UTF-8"
quote_character = "x"
escape_character = "f"
first_row_as_header = true
null_value = "NULL"
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, data.RandomInteger, data.RandomInteger, data.RandomInteger, data.RandomInteger)
}
29 changes: 20 additions & 9 deletions website/docs/r/data_factory_dataset_delimited_text.html.markdown
@@ -78,11 +78,13 @@ The following supported arguments are common across all Azure Data Factory Datasets:

* `additional_properties` - (Optional) A map of additional properties to associate with the Data Factory Dataset.

-The following supported locations for a Delimited Text Dataset:
+The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):

-* `http_server_location` - (Required) A `http_server_location` block as defined below.
+* `azure_blob_fs_location` - (Optional) An `azure_blob_fs_location` block as defined below.

-* `azure_blob_storage_location` - (Required) A `azure_blob_storage_location` block as defined below.
+* `azure_blob_storage_location` - (Optional) An `azure_blob_storage_location` block as defined below.

+* `http_server_location` - (Optional) A `http_server_location` block as defined below.

The following supported arguments are specific to Delimited Text Dataset:

@@ -116,25 +118,34 @@ A `schema_column` block supports the following:

---

-A `http_server_location` block supports the following:
+An `azure_blob_fs_location` block supports the following:

-* `relative_url` - (Required) The base URL to the web server hosting the file.
+* `file_system` - (Required) The Data Lake Storage Gen2 file system on the Azure Storage Account hosting the file.

-* `path` - (Required) The folder path to the file on the web server.
+* `path` - (Optional) The folder path to the file.

-* `filename` - (Required) The filename of the file on the web server.
+* `filename` - (Optional) The filename of the file.
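
For illustration, a populated block might look like the following (a sketch; the file system and path values are placeholders):

azure_blob_fs_location {
  file_system = "examplefs"
  path        = "foo/bar"
  filename    = "example.csv"
}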

---

-A `azure_blob_storage_location` block supports the following:
+An `azure_blob_storage_location` block supports the following:

* `container` - (Required) The container on the Azure Blob Storage Account hosting the file.

* `path` - (Required) The folder path to the file.

* `filename` - (Required) The filename of the file.

---

A `http_server_location` block supports the following:

* `relative_url` - (Required) The base URL to the web server hosting the file.

* `path` - (Required) The folder path to the file on the web server.

* `filename` - (Required) The filename of the file on the web server.


## Attributes Reference

The following attributes are exported:
