feat: Updates mongodbatlas_stream_connection resource & data sources to support dbRoleToExecute attribute (#1980)
maastha authored Mar 2, 2024
1 parent deac25e commit ee8789f
Showing 10 changed files with 150 additions and 30 deletions.
4 changes: 4 additions & 0 deletions examples/mongodbatlas_stream_connection/main.tf
@@ -13,6 +13,10 @@ resource "mongodbatlas_stream_connection" "example-cluster" {
connection_name = "ClusterConnection"
type = "Cluster"
cluster_name = var.cluster_name
db_role_to_execute = {
role = "atlasAdmin"
type = "BUILT_IN"
}
}

resource "mongodbatlas_stream_connection" "example-kafka-plaintext" {
11 changes: 11 additions & 0 deletions internal/service/streamconnection/data_source_stream_connection.go
@@ -57,6 +57,17 @@ func DSAttributes(withArguments bool) map[string]schema.Attribute {
"cluster_name": schema.StringAttribute{
Computed: true,
},
"db_role_to_execute": schema.SingleNestedAttribute{
Computed: true,
Attributes: map[string]schema.Attribute{
"role": schema.StringAttribute{
Computed: true,
},
"type": schema.StringAttribute{
Computed: true,
},
},
},

// kafka type specific
"authentication": schema.SingleNestedAttribute{
23 changes: 23 additions & 0 deletions internal/service/streamconnection/model_stream_connection.go
@@ -49,6 +49,17 @@ func NewStreamConnectionReq(ctx context.Context, plan *TFStreamConnectionModel)
streamConnection.Config = configMap
}

if !plan.DBRoleToExecute.IsNull() {
dbRoleToExecuteModel := &TFDbRoleToExecuteModel{}
if diags := plan.DBRoleToExecute.As(ctx, dbRoleToExecuteModel, basetypes.ObjectAsOptions{}); diags.HasError() {
return nil, diags
}
streamConnection.DbRoleToExecute = &admin.DBRoleToExecute{
Role: dbRoleToExecuteModel.Role.ValueStringPointer(),
Type: dbRoleToExecuteModel.Type.ValueStringPointer(),
}
}

return &streamConnection, nil
}

@@ -91,6 +102,18 @@ func NewTFStreamConnection(ctx context.Context, projID, instanceName string, cur
connectionModel.Security = securityModel
}

connectionModel.DBRoleToExecute = types.ObjectNull(DBRoleToExecuteObjectType.AttrTypes)
if apiResp.DbRoleToExecute != nil {
dbRoleToExecuteModel, diags := types.ObjectValueFrom(ctx, DBRoleToExecuteObjectType.AttrTypes, TFDbRoleToExecuteModel{
Role: types.StringPointerValue(apiResp.DbRoleToExecute.Role),
Type: types.StringPointerValue(apiResp.DbRoleToExecute.Type),
})
if diags.HasError() {
return nil, diags
}
connectionModel.DBRoleToExecute = dbRoleToExecuteModel
}

return &connectionModel, nil
}

91 changes: 62 additions & 29 deletions internal/service/streamconnection/model_stream_connection_test.go
@@ -21,6 +21,8 @@ const (
authUsername = "user1"
securityProtocol = "SSL"
bootstrapServers = "localhost:9092,another.host:9092"
dbRole = "customRole"
dbRoleType = "CUSTOM"
)

var configMap = map[string]string{
@@ -46,19 +48,24 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) {
Name: admin.PtrString(connectionName),
Type: admin.PtrString("Cluster"),
ClusterName: admin.PtrString(clusterName),
DbRoleToExecute: &admin.DBRoleToExecute{
Role: admin.PtrString(dbRole),
Type: admin.PtrString(dbRoleType),
},
},
providedProjID: dummyProjectID,
providedInstanceName: instanceName,
providedAuthConfig: nil,
expectedTFModel: &streamconnection.TFStreamConnectionModel{
ProjectID: types.StringValue(dummyProjectID),
InstanceName: types.StringValue(instanceName),
ConnectionName: types.StringValue(connectionName),
Type: types.StringValue("Cluster"),
ClusterName: types.StringValue(clusterName),
Authentication: types.ObjectNull(streamconnection.ConnectionAuthenticationObjectType.AttrTypes),
Config: types.MapNull(types.StringType),
Security: types.ObjectNull(streamconnection.ConnectionSecurityObjectType.AttrTypes),
ProjectID: types.StringValue(dummyProjectID),
InstanceName: types.StringValue(instanceName),
ConnectionName: types.StringValue(connectionName),
Type: types.StringValue("Cluster"),
ClusterName: types.StringValue(clusterName),
Authentication: types.ObjectNull(streamconnection.ConnectionAuthenticationObjectType.AttrTypes),
Config: types.MapNull(types.StringType),
Security: types.ObjectNull(streamconnection.ConnectionSecurityObjectType.AttrTypes),
DBRoleToExecute: tfDBRoleToExecuteObject(t, dbRole, dbRoleType),
},
},
{
@@ -89,6 +96,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) {
BootstrapServers: types.StringValue(bootstrapServers),
Config: tfConfigMap(t, configMap),
Security: tfSecurityObject(t, DummyCACert, securityProtocol),
DBRoleToExecute: types.ObjectNull(streamconnection.DBRoleToExecuteObjectType.AttrTypes),
},
},
{
@@ -101,13 +109,14 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) {
providedInstanceName: instanceName,
providedAuthConfig: nil,
expectedTFModel: &streamconnection.TFStreamConnectionModel{
ProjectID: types.StringValue(dummyProjectID),
InstanceName: types.StringValue(instanceName),
ConnectionName: types.StringValue(connectionName),
Type: types.StringValue("Kafka"),
Authentication: types.ObjectNull(streamconnection.ConnectionAuthenticationObjectType.AttrTypes),
Config: types.MapNull(types.StringType),
Security: types.ObjectNull(streamconnection.ConnectionSecurityObjectType.AttrTypes),
ProjectID: types.StringValue(dummyProjectID),
InstanceName: types.StringValue(instanceName),
ConnectionName: types.StringValue(connectionName),
Type: types.StringValue("Kafka"),
Authentication: types.ObjectNull(streamconnection.ConnectionAuthenticationObjectType.AttrTypes),
Config: types.MapNull(types.StringType),
Security: types.ObjectNull(streamconnection.ConnectionSecurityObjectType.AttrTypes),
DBRoleToExecute: types.ObjectNull(streamconnection.DBRoleToExecuteObjectType.AttrTypes),
},
},
{
@@ -138,6 +147,7 @@ func TestStreamConnectionSDKToTFModel(t *testing.T) {
BootstrapServers: types.StringValue(bootstrapServers),
Config: tfConfigMap(t, configMap),
Security: tfSecurityObject(t, DummyCACert, securityProtocol),
DBRoleToExecute: types.ObjectNull(streamconnection.DBRoleToExecuteObjectType.AttrTypes),
},
},
}
@@ -187,6 +197,10 @@ func TestStreamConnectionsSDKToTFModel(t *testing.T) {
Name: admin.PtrString(connectionName),
Type: admin.PtrString("Cluster"),
ClusterName: admin.PtrString(clusterName),
DbRoleToExecute: &admin.DBRoleToExecute{
Role: admin.PtrString(dbRole),
Type: admin.PtrString(dbRoleType),
},
},
},
TotalCount: admin.PtrInt(2),
@@ -214,17 +228,19 @@ func TestStreamConnectionsSDKToTFModel(t *testing.T) {
BootstrapServers: types.StringValue(bootstrapServers),
Config: tfConfigMap(t, configMap),
Security: tfSecurityObject(t, DummyCACert, securityProtocol),
DBRoleToExecute: types.ObjectNull(streamconnection.DBRoleToExecuteObjectType.AttrTypes),
},
{
ID: types.StringValue(fmt.Sprintf("%s-%s-%s", instanceName, dummyProjectID, connectionName)),
ProjectID: types.StringValue(dummyProjectID),
InstanceName: types.StringValue(instanceName),
ConnectionName: types.StringValue(connectionName),
Type: types.StringValue("Cluster"),
ClusterName: types.StringValue(clusterName),
Authentication: types.ObjectNull(streamconnection.ConnectionAuthenticationObjectType.AttrTypes),
Config: types.MapNull(types.StringType),
Security: types.ObjectNull(streamconnection.ConnectionSecurityObjectType.AttrTypes),
ID: types.StringValue(fmt.Sprintf("%s-%s-%s", instanceName, dummyProjectID, connectionName)),
ProjectID: types.StringValue(dummyProjectID),
InstanceName: types.StringValue(instanceName),
ConnectionName: types.StringValue(connectionName),
Type: types.StringValue("Cluster"),
ClusterName: types.StringValue(clusterName),
Authentication: types.ObjectNull(streamconnection.ConnectionAuthenticationObjectType.AttrTypes),
Config: types.MapNull(types.StringType),
Security: types.ObjectNull(streamconnection.ConnectionSecurityObjectType.AttrTypes),
DBRoleToExecute: tfDBRoleToExecuteObject(t, dbRole, dbRoleType),
},
},
},
@@ -275,16 +291,21 @@ func TestStreamInstanceTFToSDKCreateModel(t *testing.T) {
{
name: "Cluster type complete TF state",
tfModel: &streamconnection.TFStreamConnectionModel{
ProjectID: types.StringValue(dummyProjectID),
InstanceName: types.StringValue(instanceName),
ConnectionName: types.StringValue(connectionName),
Type: types.StringValue("Cluster"),
ClusterName: types.StringValue(clusterName),
ProjectID: types.StringValue(dummyProjectID),
InstanceName: types.StringValue(instanceName),
ConnectionName: types.StringValue(connectionName),
Type: types.StringValue("Cluster"),
ClusterName: types.StringValue(clusterName),
DBRoleToExecute: tfDBRoleToExecuteObject(t, dbRole, dbRoleType),
},
expectedSDKReq: &admin.StreamsConnection{
Name: admin.PtrString(connectionName),
Type: admin.PtrString("Cluster"),
ClusterName: admin.PtrString(clusterName),
DbRoleToExecute: &admin.DBRoleToExecute{
Role: admin.PtrString(dbRole),
Type: admin.PtrString(dbRoleType),
},
},
},
{
@@ -388,3 +409,15 @@ func tfConfigMap(t *testing.T, config map[string]string) types.Map {
}
return mapValue
}

func tfDBRoleToExecuteObject(t *testing.T, role, roleType string) types.Object {
t.Helper()
auth, diags := types.ObjectValueFrom(context.Background(), streamconnection.DBRoleToExecuteObjectType.AttrTypes, streamconnection.TFDbRoleToExecuteModel{
Role: types.StringValue(role),
Type: types.StringValue(roleType),
})
if diags.HasError() {
t.Errorf("failed to create terraform data model: %s", diags.Errors()[0].Summary())
}
return auth
}
27 changes: 27 additions & 0 deletions internal/service/streamconnection/resource_stream_connection.go
@@ -5,12 +5,14 @@ import (
"errors"
"regexp"

"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
)
@@ -43,6 +45,7 @@ type TFStreamConnectionModel struct {
BootstrapServers types.String `tfsdk:"bootstrap_servers"`
Config types.Map `tfsdk:"config"`
Security types.Object `tfsdk:"security"`
DBRoleToExecute types.Object `tfsdk:"db_role_to_execute"`
}

type TFConnectionAuthenticationModel struct {
@@ -67,6 +70,16 @@ var ConnectionSecurityObjectType = types.ObjectType{AttrTypes: map[string]attr.T
"protocol": types.StringType,
}}

type TFDbRoleToExecuteModel struct {
Role types.String `tfsdk:"role"`
Type types.String `tfsdk:"type"`
}

var DBRoleToExecuteObjectType = types.ObjectType{AttrTypes: map[string]attr.Type{
"role": types.StringType,
"type": types.StringType,
}}

func (r *streamConnectionRS) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
resp.Schema = schema.Schema{
Attributes: map[string]schema.Attribute{
@@ -102,6 +115,20 @@ func (r *streamConnectionRS) Schema(ctx context.Context, req resource.SchemaRequ
"cluster_name": schema.StringAttribute{
Optional: true,
},
"db_role_to_execute": schema.SingleNestedAttribute{
Optional: true,
Attributes: map[string]schema.Attribute{
"role": schema.StringAttribute{
Required: true,
},
"type": schema.StringAttribute{
Required: true,
Validators: []validator.String{
stringvalidator.OneOf("BUILT_IN", "CUSTOM"),
},
},
},
},

// kafka type specific
"authentication": schema.SingleNestedAttribute{
@@ -81,7 +81,7 @@ func TestAccMigrationStreamRSStreamConnection_cluster(t *testing.T) {
clusterInfo = acc.GetClusterInfo(nil)
instanceName = acc.RandomName()
)
mig.SkipIfVersionBelow(t, "1.14.0")
mig.SkipIfVersionBelow(t, "1.15.2")

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { acc.PreCheckBetaFlag(t); acc.PreCheckBasic(t) },
@@ -180,6 +180,10 @@ func clusterStreamConnectionConfig(projectIDStr, instanceName, clusterNameStr, c
connection_name = "ConnectionNameKafka"
type = "Cluster"
cluster_name = %[3]s
db_role_to_execute = {
role = "atlasAdmin"
type = "BUILT_IN"
}
}
`, projectIDStr, instanceName, clusterNameStr)
}
@@ -192,6 +196,8 @@ func clusterStreamConnectionAttributeChecks(resourceName, clusterName string) re
resource.TestCheckResourceAttrSet(resourceName, "connection_name"),
resource.TestCheckResourceAttr(resourceName, "type", "Cluster"),
resource.TestCheckResourceAttr(resourceName, "cluster_name", clusterName),
resource.TestCheckResourceAttr(resourceName, "db_role_to_execute.role", "atlasAdmin"),
resource.TestCheckResourceAttr(resourceName, "db_role_to_execute.type", "BUILT_IN"),
}
return resource.ComposeTestCheckFunc(resourceChecks...)
}
5 changes: 5 additions & 0 deletions website/docs/d/stream_connection.html.markdown
@@ -34,6 +34,7 @@ data "mongodbatlas_stream_connection" "example" {

If `type` is of value `Cluster` the following additional attributes are defined:
* `cluster_name` - Name of the cluster configured for this connection.
* `db_role_to_execute` - The built-in or custom DB role used to connect to an Atlas cluster. See [DBRoleToExecute](#DBRoleToExecute).

If `type` is of value `Kafka` the following additional attributes are defined:
* `authentication` - User credentials required to connect to a Kafka cluster. Includes the authentication type, as well as the parameters for that authentication mode. See [authentication](#authentication).
@@ -52,6 +53,10 @@ If `type` is of value `Kafka` the following additional attributes are defined:
* `broker_public_certificate` - A trusted, public x509 certificate for connecting to Kafka over SSL. String value of the certificate must be defined in the attribute.
* `protocol` - Describes the transport type. Can be either `PLAINTEXT` or `SSL`.

### DBRoleToExecute

* `role` - The name of the role to use. Can be a built-in role or a custom role.
* `type` - Type of the DB role. Can be either `BUILT_IN` or `CUSTOM`.
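
A minimal sketch of reading the new attribute through this data source (the `project_id`, `instance_name`, and `connection_name` values, and the `output` name, are illustrative):

```terraform
data "mongodbatlas_stream_connection" "example" {
  project_id      = var.project_id
  instance_name   = "InstanceName"
  connection_name = "ConnectionName"
}

# db_role_to_execute is only set for Cluster-type connections.
output "connection_db_role" {
  value = data.mongodbatlas_stream_connection.example.db_role_to_execute.role
}
```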

To learn more, see: [MongoDB Atlas API - Stream Connection](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/getStreamConnection) Documentation.
The [Terraform Provider Examples Section](https://github.com/mongodb/terraform-provider-mongodbatlas/blob/master/examples/mongodbatlas_stream_instance/atlas-streams-user-journey.md) also contains details on the overall support for Atlas Streams Processing in Terraform.
5 changes: 5 additions & 0 deletions website/docs/d/stream_connections.html.markdown
@@ -46,6 +46,7 @@ In addition to all arguments above, it also exports the following attributes:

If `type` is of value `Cluster` the following additional attributes are defined:
* `cluster_name` - Name of the cluster configured for this connection.
* `db_role_to_execute` - The built-in or custom DB role used to connect to an Atlas cluster. See [DBRoleToExecute](#DBRoleToExecute).

If `type` is of value `Kafka` the following additional attributes are defined:
* `authentication` - User credentials required to connect to a Kafka cluster. Includes the authentication type, as well as the parameters for that authentication mode. See [authentication](#authentication).
@@ -64,6 +65,10 @@ If `type` is of value `Kafka` the following additional attributes are defined:
* `broker_public_certificate` - A trusted, public x509 certificate for connecting to Kafka over SSL. String value of the certificate must be defined in the attribute.
* `protocol` - Describes the transport type. Can be either `PLAINTEXT` or `SSL`.

### DBRoleToExecute

* `role` - The name of the role to use. Can be a built-in role or a custom role.
* `type` - Type of the DB role. Can be either `BUILT_IN` or `CUSTOM`.
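
A minimal sketch of listing the roles across connections (this assumes the plural data source exposes its connections under a `results` attribute, consistent with the provider's other plural data sources):

```terraform
data "mongodbatlas_stream_connections" "example" {
  project_id    = var.project_id
  instance_name = "InstanceName"
}

# db_role_to_execute is null for non-Cluster connections, so filter on type.
output "cluster_connection_roles" {
  value = [
    for conn in data.mongodbatlas_stream_connections.example.results :
    conn.db_role_to_execute if conn.type == "Cluster"
  ]
}
```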

To learn more, see: [MongoDB Atlas API - Stream Connection](https://www.mongodb.com/docs/atlas/reference/api-resources-spec/#tag/Streams/operation/listStreamConnections) Documentation.
The [Terraform Provider Examples Section](https://github.com/mongodb/terraform-provider-mongodbatlas/blob/master/examples/mongodbatlas_stream_instance/atlas-streams-user-journey.md) also contains details on the overall support for Atlas Streams Processing in Terraform.
6 changes: 6 additions & 0 deletions website/docs/r/stream_connection.html.markdown
@@ -85,6 +85,7 @@ resource "mongodbatlas_stream_connection" "test" {

If `type` is of value `Cluster` the following additional arguments are defined:
* `cluster_name` - Name of the cluster configured for this connection.
* `db_role_to_execute` - The built-in or custom DB role used to connect to an Atlas cluster. See [DBRoleToExecute](#DBRoleToExecute).

If `type` is of value `Kafka` the following additional arguments are defined:
* `authentication` - User credentials required to connect to a Kafka cluster. Includes the authentication type, as well as the parameters for that authentication mode. See [authentication](#authentication).
@@ -103,6 +104,11 @@ If `type` is of value `Kafka` the following additional arguments are defined:
* `broker_public_certificate` - A trusted, public x509 certificate for connecting to Kafka over SSL. String value of the certificate must be defined in the attribute.
* `protocol` - Describes the transport type. Can be either `PLAINTEXT` or `SSL`.

### DBRoleToExecute

* `role` - The name of the role to use. Can be a built-in role or a custom role.
* `type` - Type of the DB role. Can be either `BUILT_IN` or `CUSTOM`.
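
A minimal sketch of the `CUSTOM` variant (the role name `myCustomRole` is hypothetical and must refer to a custom DB role that already exists in the project):

```terraform
resource "mongodbatlas_stream_connection" "example" {
  project_id      = var.project_id
  instance_name   = mongodbatlas_stream_instance.example.instance_name
  connection_name = "ClusterConnection"
  type            = "Cluster"
  cluster_name    = var.cluster_name

  db_role_to_execute = {
    role = "myCustomRole" # hypothetical custom role name
    type = "CUSTOM"
  }
}
```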

## Import

You can import a stream connection resource using the instance name, project ID, and connection name. The format must be `INSTANCE_NAME-PROJECT_ID-CONNECTION_NAME`. For example:
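
Following that format, the command would look like this (placeholder values shown; substitute real identifiers):

```shell
terraform import mongodbatlas_stream_connection.example "INSTANCE_NAME-PROJECT_ID-CONNECTION_NAME"
```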
