diff --git a/.changelog/2234.txt b/.changelog/2234.txt
new file mode 100644
index 0000000000..029c292973
--- /dev/null
+++ b/.changelog/2234.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+data-source/cloud_backup_snapshot_export_job: Marks `id` as computed instead of required
+```
diff --git a/internal/service/cloudbackupsnapshotexportjob/data_source_cloud_backup_snapshot_export_job.go b/internal/service/cloudbackupsnapshotexportjob/data_source_cloud_backup_snapshot_export_job.go
index 4d797b93bb..54262d2a2a 100644
--- a/internal/service/cloudbackupsnapshotexportjob/data_source_cloud_backup_snapshot_export_job.go
+++ b/internal/service/cloudbackupsnapshotexportjob/data_source_cloud_backup_snapshot_export_job.go
@@ -2,11 +2,11 @@ package cloudbackupsnapshotexportjob
 
 import (
 	"context"
+	"fmt"
 
 	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
 
-	"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
-	"github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
+	"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/constant"
 )
 
 func DataSource() *schema.Resource {
@@ -14,8 +14,10 @@ func DataSource() *schema.Resource {
 		ReadContext: dataSourceMongoDBAtlasCloudBackupSnapshotsExportJobRead,
 		Schema: map[string]*schema.Schema{
 			"id": {
-				Type:     schema.TypeString,
-				Required: true,
+				Type:       schema.TypeString,
+				Optional:   true,
+				Computed:   true,
+				Deprecated: fmt.Sprintf(constant.DeprecationParamByVersion, "1.18.0") + " It will no longer be an input parameter, only a computed attribute.",
 			},
 			"export_job_id": {
 				Type:     schema.TypeString,
@@ -101,69 +103,9 @@ func DataSource() *schema.Resource {
 }
 
 func dataSourceMongoDBAtlasCloudBackupSnapshotsExportJobRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
-	// Get client connection.
-	conn := meta.(*config.MongoDBClient).Atlas
-	ids := conversion.DecodeStateID(d.Id())
-	projectID := ids["project_id"]
-	clusterName := ids["cluster_name"]
-	exportID := ids["export_job_id"]
-
-	exportJob, _, err := conn.CloudProviderSnapshotExportJobs.Get(ctx, projectID, clusterName, exportID)
+	exportJob, err := readExportJob(ctx, meta, d)
 	if err != nil {
-		return diag.Errorf("error getting snapshot export job information: %s", err)
-	}
-
-	if err := d.Set("export_job_id", exportJob.ID); err != nil {
-		return diag.Errorf("error setting `export_job_id` for snapshot export job (%s): %s", d.Id(), err)
-	}
-
-	if err := d.Set("snapshot_id", exportJob.SnapshotID); err != nil {
-		return diag.Errorf("error setting `snapshot_id` for snapshot export job (%s): %s", d.Id(), err)
-	}
-
-	if err := d.Set("custom_data", flattenExportJobsCustomData(exportJob.CustomData)); err != nil {
-		return diag.Errorf("error setting `custom_data` for snapshot export job (%s): %s", d.Id(), err)
-	}
-
-	if err := d.Set("components", flattenExportJobsComponents(exportJob.Components)); err != nil {
-		return diag.Errorf("error setting `components` for snapshot export job (%s): %s", d.Id(), err)
-	}
-
-	if err := d.Set("created_at", exportJob.CreatedAt); err != nil {
-		return diag.Errorf("error setting `created_at` for snapshot export job (%s): %s", d.Id(), err)
-	}
-
-	if err := d.Set("err_msg", exportJob.ErrMsg); err != nil {
-		return diag.Errorf("error setting `created_at` for snapshot export job (%s): %s", d.Id(), err)
-	}
-
-	if err := d.Set("export_bucket_id", exportJob.ExportBucketID); err != nil {
-		return diag.Errorf("error setting `created_at` for snapshot export job (%s): %s", d.Id(), err)
-	}
-
-	if exportJob.ExportStatus != nil {
-		if err := d.Set("export_status_exported_collections", exportJob.ExportStatus.ExportedCollections); err != nil {
-			return diag.Errorf("error setting `export_status_exported_collections` for snapshot export job (%s): %s", d.Id(), err)
-		}
-
-		if err := d.Set("export_status_total_collections", exportJob.ExportStatus.TotalCollections); err != nil {
-			return diag.Errorf("error setting `export_status_total_collections` for snapshot export job (%s): %s", d.Id(), err)
-		}
-	}
-
-	if err := d.Set("finished_at", exportJob.FinishedAt); err != nil {
-		return diag.Errorf("error setting `finished_at` for snapshot export job (%s): %s", d.Id(), err)
-	}
-
-	if err := d.Set("prefix", exportJob.Prefix); err != nil {
-		return diag.Errorf("error setting `prefix` for snapshot export job (%s): %s", d.Id(), err)
+		return diag.Errorf("error reading snapshot export job information: %s", err)
 	}
-
-	if err := d.Set("state", exportJob.State); err != nil {
-		return diag.Errorf("error setting `prefix` for snapshot export job (%s): %s", d.Id(), err)
-	}
-
-	d.SetId(exportJob.ID)
-
-	return nil
+	return setExportJobFields(d, exportJob)
 }
diff --git a/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job.go b/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job.go
index 9063217264..8405ac9a1b 100644
--- a/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job.go
+++ b/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job.go
@@ -17,7 +17,7 @@ func Resource() *schema.Resource {
 	return &schema.Resource{
 		CreateContext: resourceMongoDBAtlasCloudBackupSnapshotExportJobCreate,
 		ReadContext:   resourceMongoDBAtlasCloudBackupSnapshotExportJobRead,
-		DeleteContext: schema.NoopContext,
+		DeleteContext: resourceDelete,
 		Importer: &schema.ResourceImporter{
 			StateContext: resourceMongoDBAtlasCloudBackupSnapshotExportJobImportState,
 		},
@@ -121,17 +121,8 @@ func returnCloudBackupSnapshotExportJobSchema() map[string]*schema.Schema {
 }
 
 func resourceMongoDBAtlasCloudBackupSnapshotExportJobRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
-	// Get client connection.
-	conn := meta.(*config.MongoDBClient).Atlas
-	ids := conversion.DecodeStateID(d.Id())
-	projectID := ids["project_id"]
-	clusterName := ids["cluster_name"]
-	exportID := ids["export_job_id"]
-
-	exportJob, _, err := conn.CloudProviderSnapshotExportJobs.Get(ctx, projectID, clusterName, exportID)
+	exportJob, err := readExportJob(ctx, meta, d)
 	if err != nil {
-		// case 404
-		// deleted in the backend case
 		reset := strings.Contains(err.Error(), "404") && !d.IsNewResource()
 
 		if reset {
@@ -141,7 +132,37 @@ func resourceMongoDBAtlasCloudBackupSnapshotExportJobRead(ctx context.Context, d
 		return diag.Errorf("error getting snapshot export job information: %s", err)
 	}
 
+	return setExportJobFields(d, exportJob)
+}
+
+func readExportJob(ctx context.Context, meta any, d *schema.ResourceData) (*matlas.CloudProviderSnapshotExportJob, error) {
+	conn := meta.(*config.MongoDBClient).Atlas
+	projectID, clusterName, exportID := getRequiredFields(d)
+	if d.Id() != "" && (projectID == "" || clusterName == "" || exportID == "") {
+		ids := conversion.DecodeStateID(d.Id())
+		projectID = ids["project_id"]
+		clusterName = ids["cluster_name"]
+		exportID = ids["export_job_id"]
+	}
+	exportJob, _, err := conn.CloudProviderSnapshotExportJobs.Get(ctx, projectID, clusterName, exportID)
+	if err == nil {
+		d.SetId(conversion.EncodeStateID(map[string]string{
+			"project_id":    projectID,
+			"cluster_name":  clusterName,
+			"export_job_id": exportJob.ID,
+		}))
+	}
+	return exportJob, err
+}
+
+func getRequiredFields(d *schema.ResourceData) (projectID, clusterName, exportID string) {
+	projectID = d.Get("project_id").(string)
+	clusterName = d.Get("cluster_name").(string)
+	exportID = d.Get("export_job_id").(string)
+	return projectID, clusterName, exportID
+}
+
+func setExportJobFields(d *schema.ResourceData, exportJob *matlas.CloudProviderSnapshotExportJob) diag.Diagnostics {
 	if err := d.Set("export_job_id", exportJob.ID); err != nil {
 		return diag.Errorf("error setting `export_job_id` for snapshot export job (%s): %s", d.Id(), err)
 	}
@@ -230,7 +251,6 @@ func flattenExportJobsCustomData(data []*matlas.CloudProviderSnapshotExportJobCu
 }
 
 func resourceMongoDBAtlasCloudBackupSnapshotExportJobCreate(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
-	// Get client connection.
 	conn := meta.(*config.MongoDBClient).Atlas
 	projectID := d.Get("project_id").(string)
 	clusterName := d.Get("cluster_name").(string)
@@ -246,12 +266,9 @@ func resourceMongoDBAtlasCloudBackupSnapshotExportJobCreate(ctx context.Context,
 		return diag.Errorf("error creating snapshot export job: %s", err)
 	}
 
-	d.SetId(conversion.EncodeStateID(map[string]string{
-		"project_id":    projectID,
-		"cluster_name":  clusterName,
-		"export_job_id": jobResponse.ID,
-	}))
-
+	if err := d.Set("export_job_id", jobResponse.ID); err != nil {
+		return diag.Errorf("error setting `export_job_id` for snapshot export job (%s): %s", jobResponse.ID, err)
+	}
 	return resourceMongoDBAtlasCloudBackupSnapshotExportJobRead(ctx, d, meta)
 }
 
@@ -287,11 +304,19 @@ func resourceMongoDBAtlasCloudBackupSnapshotExportJobImportState(ctx context.Con
 		return nil, fmt.Errorf("couldn't import snapshot export job %s in project %s and cluster %s, error: %s", exportID, projectID, clusterName, err)
 	}
 
-	d.SetId(conversion.EncodeStateID(map[string]string{
-		"project_id":    projectID,
-		"cluster_name":  clusterName,
-		"export_job_id": exportID,
-	}))
-
+	if err := d.Set("project_id", projectID); err != nil {
+		return nil, fmt.Errorf("error setting `project_id` for snapshot export job (%s): %s", d.Id(), err)
+	}
+	if err := d.Set("cluster_name", clusterName); err != nil {
+		return nil, fmt.Errorf("error setting `cluster_name` for snapshot export job (%s): %s", d.Id(), err)
+	}
+	if err := d.Set("export_job_id", exportID); err != nil {
+		return nil, fmt.Errorf("error setting `export_job_id` for snapshot export job (%s): %s", d.Id(), err)
+	}
 	return []*schema.ResourceData{d}, nil
 }
+
+func resourceDelete(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
+	d.SetId("")
+	return nil
+}
diff --git a/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job_migration_test.go b/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job_migration_test.go
index d3e1a7dbad..d4dbc3b78a 100644
--- a/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job_migration_test.go
+++ b/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job_migration_test.go
@@ -7,5 +7,5 @@ import (
 )
 
 func TestMigBackupSnapshotExportJob_basic(t *testing.T) {
-	mig.CreateAndRunTest(t, basicTestCase(t))
+	mig.CreateAndRunTestNonParallel(t, basicTestCase(t))
 }
diff --git a/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job_test.go b/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job_test.go
index 6d1cbc748f..662a22a09f 100644
--- a/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job_test.go
+++ b/internal/service/cloudbackupsnapshotexportjob/resource_cloud_backup_snapshot_export_job_test.go
@@ -8,7 +8,6 @@ import (
 	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
 	"github.com/hashicorp/terraform-plugin-testing/terraform"
 
-	"github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
 	"github.com/mongodb/terraform-provider-mongodbatlas/internal/testutil/acc"
 )
 
@@ -19,7 +18,7 @@ var (
 )
 
 func TestAccBackupSnapshotExportJob_basic(t *testing.T) {
-	resource.ParallelTest(t, *basicTestCase(t))
+	resource.Test(t, *basicTestCase(t))
 }
 
 func basicTestCase(tb testing.TB) *resource.TestCase {
@@ -27,36 +26,41 @@ func basicTestCase(tb testing.TB) *resource.TestCase {
 	acc.SkipTestForCI(tb) // needs AWS IAM role and S3 bucket
 	var (
-		projectID  = os.Getenv("MONGODB_ATLAS_PROJECT_ID")
-		bucketName = os.Getenv("AWS_S3_BUCKET")
-		iamRoleID  = os.Getenv("IAM_ROLE_ID")
+		clusterInfo = acc.GetClusterInfo(tb, &acc.ClusterRequest{CloudBackup: true})
+		bucketName  = os.Getenv("AWS_S3_BUCKET")
+		iamRoleID   = os.Getenv("IAM_ROLE_ID")
+		projectID   = acc.ProjectIDExecution(tb)
+		clusterName = clusterInfo.ClusterName
+		attrsSet    = []string{
+			"id",
+			"export_job_id",
+			"project_id",
+			"cluster_name",
+			"snapshot_id",
+			"export_bucket_id",
+		}
+		attrsMapWithProject = map[string]string{
+			"project_id": projectID,
+		}
+		attrsPluralDS = map[string]string{
+			"project_id":                  projectID,
+			"results.0.custom_data.0.key": "exported by",
+		}
 	)
+	checks := []resource.TestCheckFunc{checkExists(resourceName)}
+	checks = acc.AddAttrChecks(resourceName, checks, attrsMapWithProject)
+	checks = acc.AddAttrSetChecks(resourceName, checks, attrsSet...)
+	checks = acc.AddAttrChecks(dataSourceName, checks, attrsMapWithProject)
+	checks = acc.AddAttrSetChecks(dataSourceName, checks, attrsSet...)
+	checks = acc.AddAttrChecks(dataSourcePluralName, checks, attrsPluralDS)
 	return &resource.TestCase{
 		PreCheck:                 func() { acc.PreCheck(tb); acc.PreCheckS3Bucket(tb) },
 		ProtoV6ProviderFactories: acc.TestAccProviderV6Factories,
-		CheckDestroy:             checkDestroy,
 		Steps: []resource.TestStep{
 			{
-				Config: configBasic(projectID, bucketName, iamRoleID),
-				Check: resource.ComposeTestCheckFunc(
-					checkExists(resourceName),
-					resource.TestCheckResourceAttr(resourceName, "project_id", projectID),
-					resource.TestCheckResourceAttr(resourceName, "bucket_name", "example-bucket"),
-					resource.TestCheckResourceAttr(resourceName, "cloud_provider", "AWS"),
-					resource.TestCheckResourceAttrSet(resourceName, "iam_role_id"),
-
-					resource.TestCheckResourceAttr(dataSourceName, "project_id", projectID),
-					resource.TestCheckResourceAttr(dataSourceName, "bucket_name", "example-bucket"),
-					resource.TestCheckResourceAttr(dataSourceName, "cloud_provider", "AWS"),
-					resource.TestCheckResourceAttrSet(dataSourceName, "iam_role_id"),
-
-					resource.TestCheckResourceAttr(dataSourcePluralName, "project_id", projectID),
-					resource.TestCheckResourceAttr(dataSourcePluralName, "bucket_name", "example-bucket"),
-					resource.TestCheckResourceAttr(dataSourcePluralName, "cloud_provider", "AWS"),
-					resource.TestCheckResourceAttrSet(dataSourcePluralName, "iam_role_id"),
-					resource.TestCheckResourceAttr(dataSourcePluralName, "results.#", "1"),
-				),
+				Config: configBasic(projectID, bucketName, iamRoleID, clusterName),
+				Check:  resource.ComposeTestCheckFunc(checks...),
 			},
 			{
 				ResourceName:      resourceName,
@@ -74,30 +78,16 @@ func checkExists(resourceName string) resource.TestCheckFunc {
 		if !ok {
 			return fmt.Errorf("not found: %s", resourceName)
 		}
-		if rs.Primary.ID == "" {
-			return fmt.Errorf("no ID is set")
+		projectID, clusterName, exportJobID, err := readRequired(rs, resourceName)
+		if err != nil {
+			return err
 		}
-		ids := conversion.DecodeStateID(rs.Primary.ID)
-		_, _, err := acc.Conn().CloudProviderSnapshotExportJobs.Get(context.Background(), ids["project_id"], ids["cluster_name"], ids["export_job_id"])
+		_, _, err = acc.Conn().CloudProviderSnapshotExportJobs.Get(context.Background(), projectID, clusterName, exportJobID)
 		if err == nil {
 			return nil
 		}
-		return fmt.Errorf("snapshot export job (%s) does not exist", ids["export_job_id"])
-	}
-}
-
-func checkDestroy(state *terraform.State) error {
-	for _, rs := range state.RootModule().Resources {
-		if rs.Type != "mongodbatlas_cloud_backup_snapshot_export_job" {
-			continue
-		}
-		ids := conversion.DecodeStateID(rs.Primary.ID)
-		snapshotExportBucket, _, err := acc.Conn().CloudProviderSnapshotExportJobs.Get(context.Background(), ids["project_id"], ids["cluster_name"], ids["export_job_id"])
-		if err == nil && snapshotExportBucket != nil {
-			return fmt.Errorf("snapshot export job (%s) still exists", ids["export_job_id"])
-		}
+		return fmt.Errorf("snapshot export job (%s) does not exist", exportJobID)
 	}
-	return nil
 }
 
 func importStateIDFunc(resourceName string) resource.ImportStateIdFunc {
@@ -106,58 +96,69 @@ func importStateIDFunc(resourceName string) resource.ImportStateIdFunc {
 		if !ok {
 			return "", fmt.Errorf("not found: %s", resourceName)
 		}
-		ids := conversion.DecodeStateID(rs.Primary.ID)
-		return fmt.Sprintf("%s-%s-%s", ids["project_id"], ids["cluster_name"], ids["export_job_id"]), nil
+		projectID, clusterName, exportJobID, err := readRequired(rs, resourceName)
+		if err != nil {
+			return "", err
+		}
+		return fmt.Sprintf("%s--%s--%s", projectID, clusterName, exportJobID), err
 	}
 }
 
-func configBasic(projectID, bucketName, iamRoleID string) string {
-	return fmt.Sprintf(`
-resource "mongodbatlas_cluster" "my_cluster" {
-  project_id                  = var.project_id
-  name                        = "MyCluster"
-  disk_size_gb                = 1
-  provider_name               = "AWS"
-  provider_region_name        = "US_EAST_1"
-  provider_instance_size_name = "M10"
-  cloud_backup                = true // enable cloud backup snapshots
+func readRequired(rs *terraform.ResourceState, resourceName string) (projectID, clusterName, exportJobID string, err error) {
+	projectID, ok := rs.Primary.Attributes["project_id"]
+	if !ok {
+		err = fmt.Errorf("project_id not defined in resource: %s", resourceName)
+	}
+	clusterName, ok = rs.Primary.Attributes["cluster_name"]
+	if !ok {
+		err = fmt.Errorf("cluster_name not defined in resource: %s", resourceName)
+	}
+	exportJobID, ok = rs.Primary.Attributes["export_job_id"]
+	if !ok {
+		err = fmt.Errorf("export_job_id not defined in resource: %s", resourceName)
+	}
+	return projectID, clusterName, exportJobID, err
 }
 
+func configBasic(projectID, bucketName, iamRoleID, clusterName string) string {
+	return fmt.Sprintf(`
 resource "mongodbatlas_cloud_backup_snapshot" "test" {
-  project_id        = var.project_id
-  cluster_name      = mongodbatlas_cluster.my_cluster.name
-  description       = "myDescription"
+  project_id        = %[1]q
+  cluster_name      = %[4]q
+  description       = "tf-acc-test"
   retention_in_days = 1
 }
 
 resource "mongodbatlas_cloud_backup_snapshot_export_bucket" "test" {
-  project_id     = "%[1]s"
+  project_id     = %[1]q
   iam_role_id    = "%[3]s"
   bucket_name    = "%[2]s"
   cloud_provider = "AWS"
 }
 
 resource "mongodbatlas_cloud_backup_snapshot_export_job" "test" {
-  project_id       = var.project_id
-  cluster_name     = mongodbatlas_cluster.my_cluster.name
-  snapshot_id      = mongodbatlas_cloud_backup_snapshot.test.snapshot_id
-  export_bucket_id = mongodbatlas_cloud_backup_snapshot_export_bucket.test.export_bucket_id
+  project_id       = %[1]q
+  cluster_name     = %[4]q
+  snapshot_id      = mongodbatlas_cloud_backup_snapshot.test.snapshot_id
+  export_bucket_id = mongodbatlas_cloud_backup_snapshot_export_bucket.test.export_bucket_id
 
   custom_data {
     key   = "exported by"
-    value = "myName"
+    value = "tf-acc-test"
   }
 }
 
 data "mongodbatlas_cloud_backup_snapshot_export_job" "test" {
-  project_id   = "%[1]s"
-  cluster_name = mongodbatlas_cluster.my_cluster.name
-  export_job_id = mongodbatlas_cloud_backup_snapshot_export_job.myjob.export_job_id
+  project_id    = %[1]q
+  cluster_name  = %[4]q
+  export_job_id = mongodbatlas_cloud_backup_snapshot_export_job.test.export_job_id
 }
-
+
"mongodbatlas_cloud_backup_snapshot_export_jobs" "test" { - project_id = mongodbatlas_cloud_backup_snapshot_export_bucket.test.project_id - cluster_name = mongodbatlas_cluster.my_cluster.name + depends_on = [mongodbatlas_cloud_backup_snapshot_export_job.test] + project_id = %[1]q + cluster_name = %[4]q } -`, projectID, bucketName, iamRoleID) + +`, projectID, bucketName, iamRoleID, clusterName) }