Add Support for Azure Blob Storage Transfer (#4751) (#9311)
* mark field as updatable

Co-authored-by: upodroid <[email protected]>

* add azure support

Co-authored-by: upodroid <[email protected]>

* fix typo

* change path options

* revert doc change

Signed-off-by: Modular Magician <[email protected]>
modular-magician authored Jun 4, 2021
1 parent 85979d4 commit ce331bb
Showing 3 changed files with 123 additions and 9 deletions.
3 changes: 3 additions & 0 deletions .changelog/4751.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
storage-transfer: added support for `azure_blob_storage_data_source` to `google_storage_transfer_job`
```
109 changes: 102 additions & 7 deletions google/resource_storage_transfer_job.go
@@ -30,6 +30,7 @@ var (
"transfer_spec.0.gcs_data_source",
"transfer_spec.0.aws_s3_data_source",
"transfer_spec.0.http_data_source",
"transfer_spec.0.azure_blob_storage_data_source",
}
)

@@ -99,7 +100,15 @@ func resourceStorageTransferJob() *schema.Resource {
MaxItems: 1,
Elem: httpDataSchema(),
ExactlyOneOf: transferSpecDataSourceKeys,
Description: `An HTTP URL data source.`,
},
"azure_blob_storage_data_source": {
Type: schema.TypeList,
Optional: true,
MaxItems: 1,
Elem: azureBlobStorageDataSchema(),
ExactlyOneOf: transferSpecDataSourceKeys,
Description: `An Azure Blob Storage data source.`,
},
},
},
@@ -370,6 +379,45 @@ func httpDataSchema() *schema.Resource {
}
}

func azureBlobStorageDataSchema() *schema.Resource {
return &schema.Resource{
Schema: map[string]*schema.Schema{
"storage_account": {
Required: true,
Type: schema.TypeString,
Description: `The name of the Azure Storage account.`,
},
"container": {
Required: true,
Type: schema.TypeString,
Description: `The container to transfer from the Azure Storage account.`,
},
"path": {
Optional: true,
Computed: true,
Type: schema.TypeString,
Description: `Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.`,
},
"azure_credentials": {
Type: schema.TypeList,
Required: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"sas_token": {
Type: schema.TypeString,
Required: true,
Sensitive: true,
Description: `Azure shared access signature.`,
},
},
},
Description: `Credentials used to authenticate API requests to Azure.`,
},
},
}
}
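
For orientation, this is the configuration shape the schema above accepts, nested under `transfer_spec`; a minimal HCL sketch in which the account, container, path, and variable names are all hypothetical:

```hcl
azure_blob_storage_data_source {
  storage_account = "example-account"   # hypothetical Azure Storage account name
  container       = "example-container" # hypothetical container to transfer from
  path            = "backups/"          # optional object prefix; must end with '/'

  azure_credentials {
    sas_token = var.azure_sas_token # marked Sensitive in the schema above
  }
}
```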

func diffSuppressEmptyStartTimeOfDay(k, old, new string, d *schema.ResourceData) bool {
return k == "schedule.0.start_time_of_day.#" && old == "1" && new == "0"
}
@@ -769,6 +817,50 @@ func flattenHttpData(httpData *storagetransfer.HttpData) []map[string]interface{
return []map[string]interface{}{data}
}

func expandAzureCredentials(azureCredentials []interface{}) *storagetransfer.AzureCredentials {
if len(azureCredentials) == 0 || azureCredentials[0] == nil {
return nil
}

azureCredential := azureCredentials[0].(map[string]interface{})
return &storagetransfer.AzureCredentials{
SasToken: azureCredential["sas_token"].(string),
}
}

func flattenAzureCredentials(d *schema.ResourceData) []map[string]interface{} {
data := map[string]interface{}{
"sas_token": d.Get("transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials.0.sas_token"),
}

return []map[string]interface{}{data}
}

func expandAzureBlobStorageData(azureBlobStorageDatas []interface{}) *storagetransfer.AzureBlobStorageData {
if len(azureBlobStorageDatas) == 0 || azureBlobStorageDatas[0] == nil {
return nil
}

azureBlobStorageData := azureBlobStorageDatas[0].(map[string]interface{})
return &storagetransfer.AzureBlobStorageData{
Container: azureBlobStorageData["container"].(string),
Path: azureBlobStorageData["path"].(string),
StorageAccount: azureBlobStorageData["storage_account"].(string),
AzureCredentials: expandAzureCredentials(azureBlobStorageData["azure_credentials"].([]interface{})),
}
}

func flattenAzureBlobStorageData(azureBlobStorageData *storagetransfer.AzureBlobStorageData, d *schema.ResourceData) []map[string]interface{} {
data := map[string]interface{}{
"container": azureBlobStorageData.Container,
"path": azureBlobStorageData.Path,
"storage_account": azureBlobStorageData.StorageAccount,
"azure_credentials": flattenAzureCredentials(d),
}

return []map[string]interface{}{data}
}

func expandObjectConditions(conditions []interface{}) *storagetransfer.ObjectConditions {
if len(conditions) == 0 || conditions[0] == nil {
return nil
@@ -823,12 +915,13 @@ func expandTransferSpecs(transferSpecs []interface{}) *storagetransfer.TransferS

transferSpec := transferSpecs[0].(map[string]interface{})
return &storagetransfer.TransferSpec{
GcsDataSink: expandGcsData(transferSpec["gcs_data_sink"].([]interface{})),
ObjectConditions: expandObjectConditions(transferSpec["object_conditions"].([]interface{})),
TransferOptions: expandTransferOptions(transferSpec["transfer_options"].([]interface{})),
GcsDataSource: expandGcsData(transferSpec["gcs_data_source"].([]interface{})),
AwsS3DataSource: expandAwsS3Data(transferSpec["aws_s3_data_source"].([]interface{})),
HttpDataSource: expandHttpData(transferSpec["http_data_source"].([]interface{})),
GcsDataSink: expandGcsData(transferSpec["gcs_data_sink"].([]interface{})),
ObjectConditions: expandObjectConditions(transferSpec["object_conditions"].([]interface{})),
TransferOptions: expandTransferOptions(transferSpec["transfer_options"].([]interface{})),
GcsDataSource: expandGcsData(transferSpec["gcs_data_source"].([]interface{})),
AwsS3DataSource: expandAwsS3Data(transferSpec["aws_s3_data_source"].([]interface{})),
HttpDataSource: expandHttpData(transferSpec["http_data_source"].([]interface{})),
AzureBlobStorageDataSource: expandAzureBlobStorageData(transferSpec["azure_blob_storage_data_source"].([]interface{})),
}
}

@@ -850,6 +943,8 @@ func flattenTransferSpec(transferSpec *storagetransfer.TransferSpec, d *schema.R
data["aws_s3_data_source"] = flattenAwsS3Data(transferSpec.AwsS3DataSource, d)
} else if transferSpec.HttpDataSource != nil {
data["http_data_source"] = flattenHttpData(transferSpec.HttpDataSource)
} else if transferSpec.AzureBlobStorageDataSource != nil {
data["azure_blob_storage_data_source"] = flattenAzureBlobStorageData(transferSpec.AzureBlobStorageDataSource, d)
}

return []map[string][]map[string]interface{}{data}
20 changes: 18 additions & 2 deletions website/docs/r/storage_transfer_job.html.markdown
@@ -14,7 +14,7 @@ Creates a new Transfer Job in Google Cloud Storage Transfer.
To get more information about Google Cloud Storage Transfer, see:

* [Overview](https://cloud.google.com/storage-transfer/docs/overview)
* [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob)
* [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs)
* How-to Guides
* [Configuring Access to Data Sources and Sinks](https://cloud.google.com/storage-transfer/docs/configure-access)

@@ -118,7 +118,9 @@ The `transfer_spec` block supports:

* `aws_s3_data_source` - (Optional) An AWS S3 data source. Structure documented below.

* `http_data_source` - (Optional) An HTTP URL data source. Structure documented below.

* `azure_blob_storage_data_source` - (Optional) An Azure Blob Storage data source. Structure documented below.

The `schedule` block supports:

@@ -172,6 +174,20 @@ The `http_data_source` block supports:

* `list_url` - (Required) The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.

The `azure_blob_storage_data_source` block supports:

* `storage_account` - (Required) The name of the Azure Storage account.

* `container` - (Required) The container to transfer from the Azure Storage account.

* `path` - (Optional) Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.

* `azure_credentials` - (Required) Credentials used to authenticate API requests to Azure. Structure documented below.

The `azure_credentials` block supports:

* `sas_token` - (Required) Azure shared access signature. See [Grant limited access to Azure Storage resources using shared access signatures (SAS)](https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview).
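
Since the doc's Example Usage section is not part of this diff, here is a minimal end-to-end sketch of the new block in context; the project, bucket, account, and token values are hypothetical placeholders:

```hcl
resource "google_storage_transfer_job" "azure_to_gcs" {
  description = "Nightly transfer from Azure Blob Storage to GCS"
  project     = "my-project" # hypothetical

  transfer_spec {
    azure_blob_storage_data_source {
      storage_account = "example-account"   # hypothetical
      container       = "example-container" # hypothetical
      path            = "backups/"

      azure_credentials {
        sas_token = var.azure_sas_token # hypothetical variable; keep the SAS token out of source control
      }
    }

    gcs_data_sink {
      bucket_name = "example-sink-bucket" # hypothetical
    }
  }

  schedule {
    schedule_start_date {
      year  = 2021
      month = 6
      day   = 4
    }
  }
}
```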

The `schedule_start_date` and `schedule_end_date` blocks support:

* `year` - (Required) Year of date. Must be from 1 to 9999.
