Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

New Resource: 'azurerm_storage_account_customer_managed_key' to enable storage account encryption using key vault customer-managed keys #5668

Merged
merged 26 commits into from
Feb 24, 2020
Merged
Show file tree
Hide file tree
Changes from 17 commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
d1cf0b7
Initial port to new code base
WodansSon Feb 10, 2020
6291f85
Merge branch 'master' into nr_storage_account_cmk
WodansSon Feb 10, 2020
8f9d064
Got it to compile
WodansSon Feb 11, 2020
10e222d
Mostly working
WodansSon Feb 11, 2020
b15d1b3
Progress
WodansSon Feb 12, 2020
99b6fe5
Make error msg more user friendly
WodansSon Feb 12, 2020
bb8ed68
Update error conditional
WodansSon Feb 12, 2020
1d4684a
Rename resource
WodansSon Feb 13, 2020
a86c893
Add SACMK doc to TOC
WodansSon Feb 13, 2020
cf39a6e
Fixed import issue
WodansSon Feb 13, 2020
5d7f77b
Updates
WodansSon Feb 14, 2020
3a45d8e
Updated doc
WodansSon Feb 14, 2020
1668aa3
Format doc hcl blocks
WodansSon Feb 14, 2020
f89146b
Merge branch 'master' into nr_storage_account_cmk
WodansSon Feb 19, 2020
b9da1f0
Merge branch 'master' of https://github.com/terraform-providers/terra…
WodansSon Feb 21, 2020
1736632
Updates to CMK
WodansSon Feb 22, 2020
3ce6b8c
r/key_vault_key: formatting
tombuildsstuff Feb 22, 2020
fb8e382
r/storage_account: locking on the correct key during update
tombuildsstuff Feb 23, 2020
8310245
r/storage_account_customer_managed_key: comments from code review
tombuildsstuff Feb 23, 2020
675364a
r/storage_account_customer_managed_key: adding validation
tombuildsstuff Feb 23, 2020
c80abda
r/key_vault_key: removing the unused `key_vault_access_policy_id` field
tombuildsstuff Feb 23, 2020
7edd1c9
r/storage_account_customer_managed_key: fixing the example
tombuildsstuff Feb 23, 2020
ab5b5b3
d/key_vault_key: removing a dead field
tombuildsstuff Feb 23, 2020
d7c87e2
d/storage_account: removing removed fields
tombuildsstuff Feb 23, 2020
fb837ad
linting
tombuildsstuff Feb 24, 2020
ef0105d
r/storage_account_customer_managed_key: adding args to the error
tombuildsstuff Feb 24, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 64 additions & 0 deletions azurerm/helpers/azure/key_vault.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package azure
import (
"context"
"fmt"
"time"

"github.com/Azure/azure-sdk-for-go/services/keyvault/mgmt/2018-02-14/keyvault"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
Expand Down Expand Up @@ -118,3 +119,66 @@ func KeyVaultExists(ctx context.Context, client *keyvault.VaultsClient, keyVault

return true, nil
}

func KeyVaultGetSoftDeletedState(ctx context.Context, client *keyvault.VaultsClient, name string, location string) (deleteDate interface{}, purgeDate interface{}, err error) {
tombuildsstuff marked this conversation as resolved.
Show resolved Hide resolved
softDel, err := client.GetDeleted(ctx, name, location)
if err != nil {
return nil, nil, fmt.Errorf("unable to get soft delete state information: %+v", err)
}

// the logic is this way because the GetDeleted call will return an existing key vault
// that is not soft deleted, but the Deleted Vault properties will be nil
if props := softDel.Properties; props != nil {
var delDate interface{}
var purgeDate interface{}

if dd := props.DeletionDate; dd != nil {
delDate = (*dd).Format(time.RFC3339)
}
if pg := props.ScheduledPurgeDate; pg != nil {
purgeDate = (*pg).Format(time.RFC3339)
}
if delDate != nil && purgeDate != nil {
return delDate, purgeDate, nil
}
}

// this means we found an existing key vault that is not soft deleted
return nil, nil, nil
}

// KeyVaultIsSoftDeleteAndPurgeProtected returns true only when the Key Vault
// identified by keyVaultId has BOTH Soft Delete and Purge Protection enabled.
// Any parse or lookup failure is conservatively reported as false.
func KeyVaultIsSoftDeleteAndPurgeProtected(ctx context.Context, client *keyvault.VaultsClient, keyVaultId string) bool {
	name, resourceGroup, err := ParseNameAndResourceGroupFromResourceId(keyVaultId, "vaults")
	if err != nil {
		return false
	}

	resp, err := client.Get(ctx, resourceGroup.(string), name.(string))
	if err != nil {
		// previously this error was silently ignored; a failed lookup means we
		// cannot confirm protection, so report the vault as unprotected
		return false
	}

	if props := resp.Properties; props != nil {
		softDeleteEnabled := false
		purgeProtectionEnabled := false

		if esd := props.EnableSoftDelete; esd != nil {
			softDeleteEnabled = *esd
		}
		// BUG FIX: this previously re-read EnableSoftDelete (copy/paste error),
		// which meant the Purge Protection setting was never actually checked
		if epp := props.EnablePurgeProtection; epp != nil {
			purgeProtectionEnabled = *epp
		}

		return softDeleteEnabled && purgeProtectionEnabled
	}

	return false
}
tombuildsstuff marked this conversation as resolved.
Show resolved Hide resolved

// ParseNameAndResourceGroupFromResourceId extracts the resource name (looked up
// via namePathKey within the ID's path segments) and the resource group name
// from an Azure Resource ID.
func ParseNameAndResourceGroupFromResourceId(resourceId string, namePathKey string) (resourceName interface{}, resourceGroup interface{}, err error) {
	parsed, parseErr := ParseAzureResourceID(resourceId)
	if parseErr != nil {
		return nil, nil, parseErr
	}

	return parsed.Path[namePathKey], parsed.ResourceGroup, nil
}
tombuildsstuff marked this conversation as resolved.
Show resolved Hide resolved
13 changes: 12 additions & 1 deletion azurerm/internal/acceptance/data.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ type TestData struct {
// Locations is a set of Azure Regions which should be used for this Test
Locations Regions

// RandomString is a random integer which is unique to this test case
// RandomInteger is a random integer which is unique to this test case
RandomInteger int

// RandomString is a random 5 character string which is unique to this test case
Expand Down Expand Up @@ -90,6 +90,7 @@ func BuildTestData(t *testing.T, resourceType string, resourceLabel string) Test
return testData
}

// RandomIntOfLength is a random 8 to 18 digit integer which is unique to this test case
func (td *TestData) RandomIntOfLength(len int) int {
// len should not be
// - greater than 18, the longest an int can represent
Expand All @@ -116,3 +117,13 @@ func (td *TestData) RandomIntOfLength(len int) int {

return i
}

// RandomStringOfLength is a random 1 to 1024 character string which is unique to this test case
func (td *TestData) RandomStringOfLength(length int) string {
	// length must be between 1 and 1024 characters inclusive; an out-of-range
	// value is a bug in the test itself, so panicking is appropriate here.
	// NOTE: the parameter was renamed from `len` (shadowed the builtin) and the
	// no-op fmt.Sprintf wrapper (vet: no formatting directives) was removed.
	if length < 1 || length > 1024 {
		panic("Invalid Test: RandomStringOfLength: length argument must be between 1 and 1024 characters")
	}

	return acctest.RandString(length)
}
12 changes: 9 additions & 3 deletions azurerm/internal/services/keyvault/data_source_key_vault_key.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,13 @@ func dataSourceArmKeyVaultKey() *schema.Resource {
},

"key_vault_id": {
Type: schema.TypeString,
Required: true,
ValidateFunc: azure.ValidateResourceID,
Type: schema.TypeString,
Computed: true,
},

"key_vault_access_policy_id": {
Type: schema.TypeString,
Computed: true,
},

"key_type": {
Expand Down Expand Up @@ -85,6 +89,8 @@ func dataSourceArmKeyVaultKeyRead(d *schema.ResourceData, meta interface{}) erro
return fmt.Errorf("Error looking up Key %q vault url from id %q: %+v", name, keyVaultId, err)
}

d.Set("key_vault_id", keyVaultId)

resp, err := client.GetKey(ctx, keyVaultBaseUri, name, "")
if err != nil {
if utils.ResponseWasNotFound(resp.Response) {
Expand Down
11 changes: 11 additions & 0 deletions azurerm/internal/services/keyvault/resource_arm_key_vault_key.go
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,13 @@ func resourceArmKeyVaultKey() *schema.Resource {
ValidateFunc: azure.ValidateResourceID,
},

"key_vault_access_policy_id": {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
ValidateFunc: azure.ValidateResourceID,
},

"key_type": {
Type: schema.TypeString,
Required: true,
Expand Down Expand Up @@ -302,6 +309,8 @@ func resourceArmKeyVaultKeyRead(d *schema.ResourceData, meta interface{}) error
ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
defer cancel()

keyVaultAccessPolicyId := d.Get("key_vault_access_policy_id").(string)
tombuildsstuff marked this conversation as resolved.
Show resolved Hide resolved

id, err := azure.ParseKeyVaultChildID(d.Id())
if err != nil {
return err
Expand Down Expand Up @@ -339,6 +348,8 @@ func resourceArmKeyVaultKeyRead(d *schema.ResourceData, meta interface{}) error
}

d.Set("name", id.Name)
d.Set("key_vault_access_policy_id", keyVaultAccessPolicyId)
tombuildsstuff marked this conversation as resolved.
Show resolved Hide resolved

if key := resp.Key; key != nil {
d.Set("key_type", string(key.Kty))

Expand Down
27 changes: 0 additions & 27 deletions azurerm/internal/services/storage/data_source_storage_account.go
Original file line number Diff line number Diff line change
Expand Up @@ -54,11 +54,6 @@ func dataSourceArmStorageAccount() *schema.Resource {
Computed: true,
},

"account_encryption_source": {
Type: schema.TypeString,
Computed: true,
},

"custom_domain": {
Type: schema.TypeList,
Computed: true,
Expand All @@ -72,16 +67,6 @@ func dataSourceArmStorageAccount() *schema.Resource {
},
},

"enable_blob_encryption": {
Type: schema.TypeBool,
Computed: true,
},

"enable_file_encryption": {
Type: schema.TypeBool,
Computed: true,
},

"enable_https_traffic_only": {
Type: schema.TypeBool,
Computed: true,
Expand Down Expand Up @@ -329,18 +314,6 @@ func dataSourceArmStorageAccountRead(d *schema.ResourceData, meta interface{}) e
}
}

if encryption := props.Encryption; encryption != nil {
if services := encryption.Services; services != nil {
if blob := services.Blob; blob != nil {
d.Set("enable_blob_encryption", blob.Enabled)
}
if file := services.File; file != nil {
d.Set("enable_file_encryption", file.Enabled)
}
}
d.Set("account_encryption_source", string(encryption.KeySource))
}

// Computed
d.Set("primary_location", props.PrimaryLocation)
d.Set("secondary_location", props.SecondaryLocation)
Expand Down
23 changes: 12 additions & 11 deletions azurerm/internal/services/storage/registration.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,16 +32,17 @@ func (r Registration) SupportedDataSources() map[string]*schema.Resource {
// SupportedResources returns the Resources supported by this Service.
func (r Registration) SupportedResources() map[string]*schema.Resource {
	// NOTE: the scraped diff interleaved the pre- and post-change map entries,
	// producing duplicate keys; this is the clean post-change registration map
	// including the new customer-managed-key resource, keys kept sorted.
	return map[string]*schema.Resource{
		"azurerm_storage_account":                      resourceArmStorageAccount(),
		"azurerm_storage_account_customer_managed_key": resourceArmStorageAccountCustomerManagedKey(),
		"azurerm_storage_account_network_rules":        resourceArmStorageAccountNetworkRules(),
		"azurerm_storage_blob":                         resourceArmStorageBlob(),
		"azurerm_storage_container":                    resourceArmStorageContainer(),
		"azurerm_storage_data_lake_gen2_filesystem":    resourceArmStorageDataLakeGen2FileSystem(),
		"azurerm_storage_management_policy":            resourceArmStorageManagementPolicy(),
		"azurerm_storage_queue":                        resourceArmStorageQueue(),
		"azurerm_storage_share":                        resourceArmStorageShare(),
		"azurerm_storage_share_directory":              resourceArmStorageShareDirectory(),
		"azurerm_storage_table":                        resourceArmStorageTable(),
		"azurerm_storage_table_entity":                 resourceArmStorageTableEntity(),
	}
}
82 changes: 0 additions & 82 deletions azurerm/internal/services/storage/resource_arm_storage_account.go
Original file line number Diff line number Diff line change
Expand Up @@ -112,17 +112,6 @@ func resourceArmStorageAccount() *schema.Resource {
}, true),
},

"account_encryption_source": {
Type: schema.TypeString,
Optional: true,
Default: string(storage.MicrosoftStorage),
ValidateFunc: validation.StringInSlice([]string{
string(storage.MicrosoftKeyvault),
string(storage.MicrosoftStorage),
}, true),
DiffSuppressFunc: suppress.CaseDifference,
},

"custom_domain": {
Type: schema.TypeList,
Optional: true,
Expand All @@ -143,18 +132,6 @@ func resourceArmStorageAccount() *schema.Resource {
},
},

"enable_blob_encryption": {
Type: schema.TypeBool,
Optional: true,
Default: true,
},

"enable_file_encryption": {
Type: schema.TypeBool,
Optional: true,
Default: true,
},

"enable_https_traffic_only": {
Type: schema.TypeBool,
Optional: true,
Expand Down Expand Up @@ -619,15 +596,12 @@ func resourceArmStorageAccountCreate(d *schema.ResourceData, meta interface{}) e
accountKind := d.Get("account_kind").(string)
location := azure.NormalizeLocation(d.Get("location").(string))
t := d.Get("tags").(map[string]interface{})
enableBlobEncryption := d.Get("enable_blob_encryption").(bool)
enableFileEncryption := d.Get("enable_file_encryption").(bool)
enableHTTPSTrafficOnly := d.Get("enable_https_traffic_only").(bool)
isHnsEnabled := d.Get("is_hns_enabled").(bool)

accountTier := d.Get("account_tier").(string)
replicationType := d.Get("account_replication_type").(string)
storageType := fmt.Sprintf("%s_%s", accountTier, replicationType)
storageAccountEncryptionSource := d.Get("account_encryption_source").(string)

parameters := storage.AccountCreateParameters{
Location: &location,
Expand All @@ -637,16 +611,6 @@ func resourceArmStorageAccountCreate(d *schema.ResourceData, meta interface{}) e
Tags: tags.Expand(t),
Kind: storage.Kind(accountKind),
AccountPropertiesCreateParameters: &storage.AccountPropertiesCreateParameters{
Encryption: &storage.Encryption{
Services: &storage.EncryptionServices{
Blob: &storage.EncryptionService{
Enabled: utils.Bool(enableBlobEncryption),
},
File: &storage.EncryptionService{
Enabled: utils.Bool(enableFileEncryption),
}},
KeySource: storage.KeySource(storageAccountEncryptionSource),
},
EnableHTTPSTrafficOnly: &enableHTTPSTrafficOnly,
NetworkRuleSet: expandStorageAccountNetworkRules(d),
IsHnsEnabled: &isHnsEnabled,
Expand Down Expand Up @@ -847,40 +811,6 @@ func resourceArmStorageAccountUpdate(d *schema.ResourceData, meta interface{}) e
d.SetPartial("tags")
}

if d.HasChange("enable_blob_encryption") || d.HasChange("enable_file_encryption") {
encryptionSource := d.Get("account_encryption_source").(string)

opts := storage.AccountUpdateParameters{
AccountPropertiesUpdateParameters: &storage.AccountPropertiesUpdateParameters{
Encryption: &storage.Encryption{
Services: &storage.EncryptionServices{},
KeySource: storage.KeySource(encryptionSource),
},
},
}

if d.HasChange("enable_blob_encryption") {
enableEncryption := d.Get("enable_blob_encryption").(bool)
opts.Encryption.Services.Blob = &storage.EncryptionService{
Enabled: utils.Bool(enableEncryption),
}

d.SetPartial("enable_blob_encryption")
}

if d.HasChange("enable_file_encryption") {
enableEncryption := d.Get("enable_file_encryption").(bool)
opts.Encryption.Services.File = &storage.EncryptionService{
Enabled: utils.Bool(enableEncryption),
}
d.SetPartial("enable_file_encryption")
}

if _, err := client.Update(ctx, resourceGroupName, storageAccountName, opts); err != nil {
return fmt.Errorf("Error updating Azure Storage Account Encryption %q: %+v", storageAccountName, err)
}
}

if d.HasChange("custom_domain") {
opts := storage.AccountUpdateParameters{
AccountPropertiesUpdateParameters: &storage.AccountPropertiesUpdateParameters{
Expand Down Expand Up @@ -1066,18 +996,6 @@ func resourceArmStorageAccountRead(d *schema.ResourceData, meta interface{}) err
}
}

if encryption := props.Encryption; encryption != nil {
if services := encryption.Services; services != nil {
if blob := services.Blob; blob != nil {
d.Set("enable_blob_encryption", blob.Enabled)
}
if file := services.File; file != nil {
d.Set("enable_file_encryption", file.Enabled)
}
}
d.Set("account_encryption_source", string(encryption.KeySource))
}

// Computed
d.Set("primary_location", props.PrimaryLocation)
d.Set("secondary_location", props.SecondaryLocation)
Expand Down
Loading