Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: Uses id as a computed attribute for data.cloud_backup_snapshot_export_job #2234

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .changelog/2234.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:enhancement
data-source/cloud_backup_snapshot_export_job: Marks `id` as computed instead of required
```
Original file line number Diff line number Diff line change
Expand Up @@ -2,20 +2,22 @@ package cloudbackupsnapshotexportjob

import (
"context"
"fmt"

"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/constant"
)

func DataSource() *schema.Resource {
return &schema.Resource{
ReadContext: dataSourceMongoDBAtlasCloudBackupSnapshotsExportJobRead,
Schema: map[string]*schema.Schema{
"id": {
Type: schema.TypeString,
Required: true,
Type: schema.TypeString,
Optional: true,
Computed: true,
Deprecated: fmt.Sprintf(constant.DeprecationParamByVersion, "1.18.0") + " Will not be an input parameter, only computed.",
},
"export_job_id": {
Type: schema.TypeString,
Expand Down Expand Up @@ -101,69 +103,9 @@ func DataSource() *schema.Resource {
}

func dataSourceMongoDBAtlasCloudBackupSnapshotsExportJobRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
// Get client connection.
conn := meta.(*config.MongoDBClient).Atlas
ids := conversion.DecodeStateID(d.Id())
projectID := ids["project_id"]
clusterName := ids["cluster_name"]
exportID := ids["export_job_id"]

exportJob, _, err := conn.CloudProviderSnapshotExportJobs.Get(ctx, projectID, clusterName, exportID)
exportJob, err := readExportJob(ctx, meta, d)
if err != nil {
return diag.Errorf("error getting snapshot export job information: %s", err)
}

if err := d.Set("export_job_id", exportJob.ID); err != nil {
return diag.Errorf("error setting `export_job_id` for snapshot export job (%s): %s", d.Id(), err)
}

if err := d.Set("snapshot_id", exportJob.SnapshotID); err != nil {
return diag.Errorf("error setting `snapshot_id` for snapshot export job (%s): %s", d.Id(), err)
}

if err := d.Set("custom_data", flattenExportJobsCustomData(exportJob.CustomData)); err != nil {
return diag.Errorf("error setting `custom_data` for snapshot export job (%s): %s", d.Id(), err)
}

if err := d.Set("components", flattenExportJobsComponents(exportJob.Components)); err != nil {
return diag.Errorf("error setting `components` for snapshot export job (%s): %s", d.Id(), err)
}

if err := d.Set("created_at", exportJob.CreatedAt); err != nil {
return diag.Errorf("error setting `created_at` for snapshot export job (%s): %s", d.Id(), err)
}

if err := d.Set("err_msg", exportJob.ErrMsg); err != nil {
return diag.Errorf("error setting `created_at` for snapshot export job (%s): %s", d.Id(), err)
}

if err := d.Set("export_bucket_id", exportJob.ExportBucketID); err != nil {
return diag.Errorf("error setting `created_at` for snapshot export job (%s): %s", d.Id(), err)
}

if exportJob.ExportStatus != nil {
if err := d.Set("export_status_exported_collections", exportJob.ExportStatus.ExportedCollections); err != nil {
return diag.Errorf("error setting `export_status_exported_collections` for snapshot export job (%s): %s", d.Id(), err)
}

if err := d.Set("export_status_total_collections", exportJob.ExportStatus.TotalCollections); err != nil {
return diag.Errorf("error setting `export_status_total_collections` for snapshot export job (%s): %s", d.Id(), err)
}
}

if err := d.Set("finished_at", exportJob.FinishedAt); err != nil {
return diag.Errorf("error setting `finished_at` for snapshot export job (%s): %s", d.Id(), err)
}

if err := d.Set("prefix", exportJob.Prefix); err != nil {
return diag.Errorf("error setting `prefix` for snapshot export job (%s): %s", d.Id(), err)
return diag.Errorf("error reading snapshot export job information: %s", err)
}

if err := d.Set("state", exportJob.State); err != nil {
return diag.Errorf("error setting `prefix` for snapshot export job (%s): %s", d.Id(), err)
}

d.SetId(exportJob.ID)

return nil
return setExportJobFields(d, exportJob)
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ func Resource() *schema.Resource {
return &schema.Resource{
CreateContext: resourceMongoDBAtlasCloudBackupSnapshotExportJobCreate,
ReadContext: resourceMongoDBAtlasCloudBackupSnapshotExportJobRead,
DeleteContext: schema.NoopContext,
DeleteContext: resourceDelete,
Importer: &schema.ResourceImporter{
StateContext: resourceMongoDBAtlasCloudBackupSnapshotExportJobImportState,
},
Expand Down Expand Up @@ -121,17 +121,8 @@ func returnCloudBackupSnapshotExportJobSchema() map[string]*schema.Schema {
}

func resourceMongoDBAtlasCloudBackupSnapshotExportJobRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
// Get client connection.
conn := meta.(*config.MongoDBClient).Atlas
ids := conversion.DecodeStateID(d.Id())
projectID := ids["project_id"]
clusterName := ids["cluster_name"]
exportID := ids["export_job_id"]

exportJob, _, err := conn.CloudProviderSnapshotExportJobs.Get(ctx, projectID, clusterName, exportID)
exportJob, err := readExportJob(ctx, meta, d)
if err != nil {
// case 404
// deleted in the backend case
reset := strings.Contains(err.Error(), "404") && !d.IsNewResource()

if reset {
Expand All @@ -141,7 +132,37 @@ func resourceMongoDBAtlasCloudBackupSnapshotExportJobRead(ctx context.Context, d

return diag.Errorf("error getting snapshot export job information: %s", err)
}
return setExportJobFields(d, exportJob)
}

// readExportJob fetches the snapshot export job described by the resource
// data. It first tries the individual attributes (project_id, cluster_name,
// export_job_id); if any of them is empty and a state ID is present, it
// falls back to the IDs encoded in the state ID. On a successful lookup the
// state ID is (re)encoded so it always reflects the job that was read.
func readExportJob(ctx context.Context, meta any, d *schema.ResourceData) (*matlas.CloudProviderSnapshotExportJob, error) {
	client := meta.(*config.MongoDBClient).Atlas
	projectID, clusterName, exportID := getRequiredFields(d)

	// Fall back to the encoded state ID (import / legacy state) when the
	// discrete attributes are not all populated.
	anyMissing := projectID == "" || clusterName == "" || exportID == ""
	if stateID := d.Id(); stateID != "" && anyMissing {
		decoded := conversion.DecodeStateID(stateID)
		projectID, clusterName, exportID = decoded["project_id"], decoded["cluster_name"], decoded["export_job_id"]
	}

	job, _, err := client.CloudProviderSnapshotExportJobs.Get(ctx, projectID, clusterName, exportID)
	if err != nil {
		return job, err
	}
	d.SetId(conversion.EncodeStateID(map[string]string{
		"project_id":    projectID,
		"cluster_name":  clusterName,
		"export_job_id": job.ID,
	}))
	return job, nil
}

// getRequiredFields returns the project_id, cluster_name, and export_job_id
// string attributes currently stored in the resource data.
func getRequiredFields(d *schema.ResourceData) (projectID, clusterName, exportID string) {
	str := func(key string) string { return d.Get(key).(string) }
	return str("project_id"), str("cluster_name"), str("export_job_id")
}

func setExportJobFields(d *schema.ResourceData, exportJob *matlas.CloudProviderSnapshotExportJob) diag.Diagnostics {
if err := d.Set("export_job_id", exportJob.ID); err != nil {
return diag.Errorf("error setting `export_job_id` for snapshot export job (%s): %s", d.Id(), err)
}
Expand Down Expand Up @@ -230,7 +251,6 @@ func flattenExportJobsCustomData(data []*matlas.CloudProviderSnapshotExportJobCu
}

func resourceMongoDBAtlasCloudBackupSnapshotExportJobCreate(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
// Get client connection.
conn := meta.(*config.MongoDBClient).Atlas
projectID := d.Get("project_id").(string)
clusterName := d.Get("cluster_name").(string)
Expand All @@ -246,12 +266,9 @@ func resourceMongoDBAtlasCloudBackupSnapshotExportJobCreate(ctx context.Context,
return diag.Errorf("error creating snapshot export job: %s", err)
}

d.SetId(conversion.EncodeStateID(map[string]string{
"project_id": projectID,
"cluster_name": clusterName,
"export_job_id": jobResponse.ID,
}))

if err := d.Set("export_job_id", jobResponse.ID); err != nil {
return diag.Errorf("error setting `export_job_id` for snapshot export job (%s): %s", jobResponse.ID, err)
}
return resourceMongoDBAtlasCloudBackupSnapshotExportJobRead(ctx, d, meta)
}

Expand Down Expand Up @@ -287,11 +304,19 @@ func resourceMongoDBAtlasCloudBackupSnapshotExportJobImportState(ctx context.Con
return nil, fmt.Errorf("couldn't import snapshot export job %s in project %s and cluster %s, error: %s", exportID, projectID, clusterName, err)
}

d.SetId(conversion.EncodeStateID(map[string]string{
"project_id": projectID,
"cluster_name": clusterName,
"export_job_id": exportID,
}))

if err := d.Set("project_id", projectID); err != nil {
return nil, fmt.Errorf("error setting `project_id` for snapshot export job (%s): %s", d.Id(), err)
}
if err := d.Set("cluster_name", clusterName); err != nil {
return nil, fmt.Errorf("error setting `cluster_name` for snapshot export job (%s): %s", d.Id(), err)
}
if err := d.Set("export_job_id", exportID); err != nil {
return nil, fmt.Errorf("error setting `export_job_id` for snapshot export job (%s): %s", d.Id(), err)
}
return []*schema.ResourceData{d}, nil
}

// resourceDelete removes the export job from Terraform state only; no API
// call is made. NOTE(review): this replaces schema.NoopContext — presumably
// Atlas snapshot export jobs cannot be deleted/canceled server-side, so
// clearing local state is the intended behavior; confirm against the Atlas API.
func resourceDelete(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
	d.SetId("")
	return nil
}
Original file line number Diff line number Diff line change
Expand Up @@ -7,5 +7,5 @@ import (
)

// TestMigBackupSnapshotExportJob_basic runs the basic export job test case
// through the migration-test harness. NOTE(review): switched from
// CreateAndRunTest to the non-parallel variant in this change — presumably to
// avoid interference with shared test infrastructure; confirm with the mig
// package documentation.
func TestMigBackupSnapshotExportJob_basic(t *testing.T) {
	mig.CreateAndRunTestNonParallel(t, basicTestCase(t))
}
Loading