azurerm_databricks_workspace - support for enhanced security compliance features (hashicorp#26606)

* added 'enhanced_security_compliance' block to databricks workspace schema

* added customized diff checks to verify databricks workspace 'enhanced_security_compliance' block

* added premium sku check for databricks workspace 'enhanced_security_compliance' (see the sketch below)

* added 'enhanced_security_compliance' block to read and create/update methods

* documented new 'enhanced_security_compliance' block for 'azurerm_databricks_workspace' resource

* Fixed bugs, added acceptance test

* Fix schema linting check

* Added data source support and corresponding acctest

* PR feedback improvements #1:

- Always set the value into state
- Simplified expansion logic
- Renamed acctest methods to conform with standards
- Removed unnecessary parameterisation of the acctest config method
- Added docs for the data source

* PR feedback improvements #2:

- Removed a stray newline
- Used pointer.From to prevent a nil dereference
- Improved the validation error message

* PR feedback improvements #3:

- Replaced single quotes with backticks in error messages
- Removed RequiredWith from compliance_security_profile_standards
- Removed redundant test checks
- Improved the data source docs
- Added 'defaults to' wording to the resource doc

---------

Co-authored-by: Gerry Tan <gerry.tan@microsoft.com>
ssouthcity and gerrytan authored Nov 27, 2024
1 parent c5ca73a commit 0b5023d
Showing 9 changed files with 478 additions and 38 deletions.
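The premium SKU check and the other customized diff checks mentioned in the commit message amount to validation along the following lines. This is a minimal sketch with assumed rule and helper names, not the actual resource code; the real checks hook into the resource's CustomizeDiff.

```go
package databricks

import "fmt"

// validateEnhancedSecurityCompliance sketches the customized diff checks
// described in the commit message. The exact rule set is an assumption for
// illustration; the real checks are wired into the resource's CustomizeDiff.
func validateEnhancedSecurityCompliance(sku string, blockDefined bool, profileEnabled bool, standards []string) error {
	if !blockDefined {
		return nil
	}

	// The block is only supported on premium-tier workspaces.
	if sku != "premium" {
		return fmt.Errorf("`enhanced_security_compliance` is only available with a `premium` workspace `sku`, got %q", sku)
	}

	// Standards only make sense when the compliance security profile is enabled.
	if len(standards) > 0 && !profileEnabled {
		return fmt.Errorf("`compliance_security_profile_standards` cannot be set when `compliance_security_profile_enabled` is `false`")
	}

	return nil
}
```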
7 changes: 7 additions & 0 deletions examples/databricks/enhanced-security-compliance/README.md
@@ -0,0 +1,7 @@
## Example: Databricks Workspace with Enhanced Security and Compliance

This example provisions a Databricks Workspace within Azure with Enhanced Security and Compliance settings enabled.

### Variables

* `prefix` - (Required) The prefix used for all resources in this example.
23 changes: 23 additions & 0 deletions examples/databricks/enhanced-security-compliance/main.tf
@@ -0,0 +1,23 @@
provider "azurerm" {
features {}
}

resource "azurerm_resource_group" "example" {
name = "${var.prefix}-databricks-esc"
location = "West Europe"
}

resource "azurerm_databricks_workspace" "example" {
name = "${var.prefix}-DBW"
resource_group_name = azurerm_resource_group.example.name
location = azurerm_resource_group.example.location
sku = "premium"
managed_resource_group_name = "${var.prefix}-DBW-managed-esc"

enhanced_security_compliance {
automatic_cluster_update_enabled = true
compliance_security_profile_enabled = true
compliance_security_profile_standards = ["HIPAA", "PCI_DSS"]
enhanced_security_monitoring_enabled = true
}
}
7 changes: 7 additions & 0 deletions examples/databricks/enhanced-security-compliance/variables.tf
@@ -0,0 +1,7 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: MPL-2.0

variable "prefix" {
description = "The Prefix used for all resources in this example"
}

31 changes: 31 additions & 0 deletions internal/services/databricks/databricks_workspace_data_source.go
@@ -99,6 +99,34 @@ func dataSourceDatabricksWorkspace() *pluginsdk.Resource {
				},
			},

			"enhanced_security_compliance": {
				Type:     pluginsdk.TypeList,
				Computed: true,
				Elem: &pluginsdk.Resource{
					Schema: map[string]*pluginsdk.Schema{
						"automatic_cluster_update_enabled": {
							Type:     pluginsdk.TypeBool,
							Computed: true,
						},
						"compliance_security_profile_enabled": {
							Type:     pluginsdk.TypeBool,
							Computed: true,
						},
						"compliance_security_profile_standards": {
							Type:     pluginsdk.TypeSet,
							Computed: true,
							Elem: &pluginsdk.Schema{
								Type: pluginsdk.TypeString,
							},
						},
						"enhanced_security_monitoring_enabled": {
							Type:     pluginsdk.TypeBool,
							Computed: true,
						},
					},
				},
			},

			"tags": commonschema.Tags(),
		},
	}
@@ -138,6 +166,9 @@ func dataSourceDatabricksWorkspaceRead(d *pluginsdk.ResourceData, meta interface{}) error {
	}
	d.Set("workspace_url", model.Properties.WorkspaceURL)
	d.Set("location", model.Location)
	if err := d.Set("enhanced_security_compliance", flattenWorkspaceEnhancedSecurity(model.Properties.EnhancedSecurityCompliance)); err != nil {
		return fmt.Errorf("setting `enhanced_security_compliance`: %+v", err)
	}

	return tags.FlattenAndSet(d, model.Tags)
}
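`flattenWorkspaceEnhancedSecurity` is defined alongside the resource and is not shown in this diff. A minimal sketch of what it does, with the go-azure-sdk model replaced by a hypothetical stand-in type, could look like this:

```go
package databricks

// enhancedSecurityComplianceDefinition stands in for the go-azure-sdk
// workspaces model; the field names here are assumptions for illustration.
type enhancedSecurityComplianceDefinition struct {
	AutomaticClusterUpdateEnabled      *bool
	ComplianceSecurityProfileEnabled   *bool
	ComplianceSecurityProfileStandards *[]string
	EnhancedSecurityMonitoringEnabled  *bool
}

// flattenWorkspaceEnhancedSecuritySketch converts the API model into the
// single-item list expected by the `enhanced_security_compliance` block,
// returning an empty list when the property is absent.
func flattenWorkspaceEnhancedSecuritySketch(input *enhancedSecurityComplianceDefinition) []interface{} {
	if input == nil {
		return []interface{}{}
	}

	standards := make([]string, 0)
	if input.ComplianceSecurityProfileStandards != nil {
		standards = *input.ComplianceSecurityProfileStandards
	}

	return []interface{}{
		map[string]interface{}{
			"automatic_cluster_update_enabled":      fromBool(input.AutomaticClusterUpdateEnabled),
			"compliance_security_profile_enabled":   fromBool(input.ComplianceSecurityProfileEnabled),
			"compliance_security_profile_standards": standards,
			"enhanced_security_monitoring_enabled":  fromBool(input.EnhancedSecurityMonitoringEnabled),
		},
	}
}

// fromBool mirrors pointer.From for *bool, defaulting to false on nil.
func fromBool(b *bool) bool {
	if b == nil {
		return false
	}
	return *b
}
```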
internal/services/databricks/databricks_workspace_data_source_test.go
@@ -46,6 +46,27 @@ func TestAccDatabricksWorkspaceDataSource_storageAccountIdentity(t *testing.T) {
	})
}

func TestAccDatabricksWorkspaceDataSource_enhancedComplianceSecurity(t *testing.T) {
	data := acceptance.BuildTestData(t, "data.azurerm_databricks_workspace", "test")
	r := DatabricksWorkspaceDataSource{}

	data.DataSourceTest(t, []acceptance.TestStep{
		{
			Config: r.enhancedSecurityCompliance(data),
			Check: acceptance.ComposeTestCheckFunc(
				acceptance.TestMatchResourceAttr(data.ResourceName, "workspace_url", regexp.MustCompile("azuredatabricks.net")),
				check.That(data.ResourceName).Key("workspace_id").Exists(),
				check.That(data.ResourceName).Key("location").Exists(),
				check.That(data.ResourceName).Key("enhanced_security_compliance.#").HasValue("1"),
				check.That(data.ResourceName).Key("enhanced_security_compliance.0.automatic_cluster_update_enabled").HasValue("true"),
				check.That(data.ResourceName).Key("enhanced_security_compliance.0.compliance_security_profile_enabled").HasValue("true"),
				check.That(data.ResourceName).Key("enhanced_security_compliance.0.compliance_security_profile_standards.#").HasValue("2"),
				check.That(data.ResourceName).Key("enhanced_security_compliance.0.enhanced_security_monitoring_enabled").HasValue("true"),
			),
		},
	})
}

func (DatabricksWorkspaceDataSource) basic(data acceptance.TestData) string {
	return fmt.Sprintf(`
provider "azurerm" {
@@ -195,3 +216,35 @@ resource "azurerm_key_vault_access_policy" "databricks" {
}
`, data.RandomInteger, data.Locations.Primary, data.RandomString, getDatabricksPrincipalId(data.Client().SubscriptionID))
}

func (DatabricksWorkspaceDataSource) enhancedSecurityCompliance(data acceptance.TestData) string {
	return fmt.Sprintf(`
provider "azurerm" {
  features {}
}
resource "azurerm_resource_group" "test" {
  name     = "acctestRG-databricks-%d"
  location = "%s"
}
resource "azurerm_databricks_workspace" "test" {
  name                = "acctestDBW-%d"
  resource_group_name = azurerm_resource_group.test.name
  location            = azurerm_resource_group.test.location
  sku                 = "premium"
  enhanced_security_compliance {
    automatic_cluster_update_enabled      = true
    compliance_security_profile_enabled   = true
    compliance_security_profile_standards = ["PCI_DSS", "HIPAA"]
    enhanced_security_monitoring_enabled  = true
  }
}
data "azurerm_databricks_workspace" "test" {
  name                = azurerm_databricks_workspace.test.name
  resource_group_name = azurerm_resource_group.test.name
}
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger)
}
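On the resource side, the block configured in this test is expanded back into the API model for create/update. A minimal sketch under the same assumptions as the flatten example above (hypothetical stand-in type; in the provider the standards arrive as a `*pluginsdk.Set` rather than a plain slice):

```go
package databricks

// enhancedSecurityComplianceDefinition stands in for the go-azure-sdk
// workspaces model; the field names here are assumptions for illustration.
type enhancedSecurityComplianceDefinition struct {
	AutomaticClusterUpdateEnabled      *bool
	ComplianceSecurityProfileEnabled   *bool
	ComplianceSecurityProfileStandards *[]string
	EnhancedSecurityMonitoringEnabled  *bool
}

// expandWorkspaceEnhancedSecuritySketch turns the single-item
// `enhanced_security_compliance` block into the API model; an empty input
// returns nil so the property is omitted from the request.
func expandWorkspaceEnhancedSecuritySketch(input []interface{}) *enhancedSecurityComplianceDefinition {
	if len(input) == 0 || input[0] == nil {
		return nil
	}

	raw := input[0].(map[string]interface{})

	standards := make([]string, 0)
	for _, s := range raw["compliance_security_profile_standards"].([]interface{}) {
		standards = append(standards, s.(string))
	}

	return &enhancedSecurityComplianceDefinition{
		AutomaticClusterUpdateEnabled:      toBoolPtr(raw["automatic_cluster_update_enabled"].(bool)),
		ComplianceSecurityProfileEnabled:   toBoolPtr(raw["compliance_security_profile_enabled"].(bool)),
		ComplianceSecurityProfileStandards: &standards,
		EnhancedSecurityMonitoringEnabled:  toBoolPtr(raw["enhanced_security_monitoring_enabled"].(bool)),
	}
}

// toBoolPtr mirrors pointer.To for bool values.
func toBoolPtr(b bool) *bool {
	return &b
}
```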