Update more tests
Daniel Intskirveli committed Nov 12, 2019
1 parent 083745c commit f77a95b
Showing 4 changed files with 134 additions and 4 deletions.
2 changes: 2 additions & 0 deletions azurerm/helpers/azure/hdinsight.go
@@ -183,6 +183,8 @@ func SchemaHDInsightsGen2StorageAccounts() *schema.Schema {
    return &schema.Schema{
        Type:     schema.TypeList,
        Optional: true,
        // HDInsight doesn't seem to allow adding more than one gen2 storage account right now.
        MaxItems: 1,
        Elem: &schema.Resource{
            Schema: map[string]*schema.Schema{
                "storage_resource_id": {
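For context, the hunk above cuts off after the first field. Below is a minimal sketch of how the full Gen2 storage schema plausibly reads, inferring the remaining fields (filesystem_id, managed_identity_resource_id, is_default) from the test configuration added later in this commit; the exact types, the Required/ForceNew settings, and the absence of validation functions are assumptions for illustration, not details shown in this diff.

// Sketch only: field details beyond MaxItems are inferred from the test config, not from this commit.
func SchemaHDInsightsGen2StorageAccounts() *schema.Schema {
    return &schema.Schema{
        Type:     schema.TypeList,
        Optional: true,
        // HDInsight doesn't seem to allow adding more than one gen2 storage account right now.
        MaxItems: 1,
        Elem: &schema.Resource{
            Schema: map[string]*schema.Schema{
                "storage_resource_id": {
                    Type:     schema.TypeString, // resource ID of the HNS-enabled StorageV2 account
                    Required: true,
                    ForceNew: true,
                },
                "filesystem_id": {
                    Type:     schema.TypeString, // ID of the Data Lake Gen2 filesystem
                    Required: true,
                    ForceNew: true,
                },
                "managed_identity_resource_id": {
                    Type:     schema.TypeString, // user-assigned identity granted Storage Blob Data Owner
                    Required: true,
                    ForceNew: true,
                },
                "is_default": {
                    Type:     schema.TypeBool,
                    Required: true,
                    ForceNew: true,
                },
            },
        },
    }
}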
132 changes: 132 additions & 0 deletions azurerm/resource_arm_hdinsight_kafka_cluster_test.go
@@ -48,6 +48,43 @@ func TestAccAzureRMHDInsightKafkaCluster_basic(t *testing.T) {
    })
}

func TestAccAzureRMHDInsightKafkaCluster_gen2storage(t *testing.T) {
    resourceName := "azurerm_hdinsight_kafka_cluster.test"
    ri := tf.AccRandTimeInt()
    rs := strings.ToLower(acctest.RandString(11))
    location := testLocation()

    resource.ParallelTest(t, resource.TestCase{
        PreCheck:     func() { testAccPreCheck(t) },
        Providers:    testAccProviders,
        CheckDestroy: testCheckAzureRMHDInsightClusterDestroy("azurerm_hdinsight_kafka_cluster"),
        Steps: []resource.TestStep{
            {
                Config: testAccAzureRMHDInsightKafkaCluster_gen2storage(ri, rs, location),
                Check: resource.ComposeTestCheckFunc(
                    testCheckAzureRMHDInsightClusterExists(resourceName),
                    resource.TestCheckResourceAttrSet(resourceName, "https_endpoint"),
                    resource.TestCheckResourceAttrSet(resourceName, "ssh_endpoint"),
                ),
            },
            {
                ResourceName:      resourceName,
                ImportState:       true,
                ImportStateVerify: true,
                ImportStateVerifyIgnore: []string{
                    "roles.0.head_node.0.password",
                    "roles.0.head_node.0.vm_size",
                    "roles.0.worker_node.0.password",
                    "roles.0.worker_node.0.vm_size",
                    "roles.0.zookeeper_node.0.password",
                    "roles.0.zookeeper_node.0.vm_size",
                    "storage_account",
                },
            },
        },
    })
}

func TestAccAzureRMHDInsightKafkaCluster_requiresImport(t *testing.T) {
    if !features.ShouldResourcesBeImported() {
        t.Skip("Skipping since resources aren't required to be imported")
@@ -303,6 +340,62 @@ resource "azurerm_hdinsight_kafka_cluster" "test" {
`, template, rInt)
}

func testAccAzureRMHDInsightKafkaCluster_gen2storage(rInt int, rString string, location string) string {
    template := testAccAzureRMHDInsightKafkaCluster_gen2template(rInt, rString, location)
    return fmt.Sprintf(`
%s

resource "azurerm_hdinsight_kafka_cluster" "test" {
  depends_on = [azurerm_role_assignment.test]

  name                = "acctesthdi-%d"
  resource_group_name = "${azurerm_resource_group.test.name}"
  location            = "${azurerm_resource_group.test.location}"
  cluster_version     = "3.6"
  tier                = "Standard"

  component_version {
    kafka = "1.1"
  }

  gateway {
    enabled  = true
    username = "acctestusrgw"
    password = "TerrAform123!"
  }

  storage_account_gen2 {
    storage_resource_id          = azurerm_storage_account.gen2test.id
    filesystem_id                = azurerm_storage_data_lake_gen2_filesystem.gen2test.id
    managed_identity_resource_id = azurerm_user_assigned_identity.test.id
    is_default                   = true
  }

  roles {
    head_node {
      vm_size  = "Standard_D3_V2"
      username = "acctestusrvm"
      password = "AccTestvdSC4daf986!"
    }

    worker_node {
      vm_size                  = "Standard_D3_V2"
      username                 = "acctestusrvm"
      password                 = "AccTestvdSC4daf986!"
      target_instance_count    = 3
      number_of_disks_per_node = 2
    }

    zookeeper_node {
      vm_size  = "Standard_D3_V2"
      username = "acctestusrvm"
      password = "AccTestvdSC4daf986!"
    }
  }
}
`, template, rInt)
}

func testAccAzureRMHDInsightKafkaCluster_requiresImport(rInt int, rString string, location string) string {
    template := testAccAzureRMHDInsightKafkaCluster_basic(rInt, rString, location)
    return fmt.Sprintf(`
@@ -609,3 +702,42 @@ resource "azurerm_storage_container" "test" {
}
`, rInt, location, rString)
}

func testAccAzureRMHDInsightKafkaCluster_gen2template(rInt int, rString string, location string) string {
    return fmt.Sprintf(`
resource "azurerm_resource_group" "test" {
  name     = "acctestRG-%d"
  location = "%s"
}

resource "azurerm_storage_account" "gen2test" {
  name                     = "accgen2test%s"
  resource_group_name      = azurerm_resource_group.test.name
  location                 = azurerm_resource_group.test.location
  account_kind             = "StorageV2"
  account_tier             = "Standard"
  account_replication_type = "LRS"
  is_hns_enabled           = true
}

resource "azurerm_storage_data_lake_gen2_filesystem" "gen2test" {
  name               = "acctest"
  storage_account_id = azurerm_storage_account.gen2test.id
}

resource "azurerm_user_assigned_identity" "test" {
  resource_group_name = "${azurerm_resource_group.test.name}"
  location            = "${azurerm_resource_group.test.location}"
  name                = "test-identity"
}

data "azurerm_subscription" "primary" {}

resource "azurerm_role_assignment" "test" {
  scope                = "${data.azurerm_subscription.primary.id}"
  role_definition_name = "Storage Blob Data Owner"
  principal_id         = "${azurerm_user_assigned_identity.test.principal_id}"
}
`, rInt, location, rString)
}
2 changes: 0 additions & 2 deletions azurerm/resource_arm_hdinsight_ml_services_cluster.go
@@ -89,8 +89,6 @@ func resourceArmHDInsightMLServicesCluster() *schema.Resource {

            "storage_account": azure.SchemaHDInsightsStorageAccounts(),

            "storage_account_gen2": azure.SchemaHDInsightsGen2StorageAccounts(),

            "roles": {
                Type:     schema.TypeList,
                Required: true,
2 changes: 0 additions & 2 deletions azurerm/resource_arm_hdinsight_rserver_cluster.go
@@ -89,8 +89,6 @@ func resourceArmHDInsightRServerCluster() *schema.Resource {

            "storage_account": azure.SchemaHDInsightsStorageAccounts(),

            "storage_account_gen2": azure.SchemaHDInsightsGen2StorageAccounts(),

            "roles": {
                Type:     schema.TypeList,
                Required: true,
