diff --git a/docs/data-sources/packer_image.md b/docs/data-sources/packer_image.md index 290734972..334f365bc 100644 --- a/docs/data-sources/packer_image.md +++ b/docs/data-sources/packer_image.md @@ -38,11 +38,12 @@ output "packer-registry-ubuntu" { - `bucket_name` (String) The slug of the HCP Packer Registry image bucket to pull from. - `cloud_provider` (String) Name of the cloud provider this image is stored-in. -- `iteration_id` (String) HCP ID of this image. - `region` (String) Region this image is stored in, if any. ### Optional +- `channel` (String) Channel that promotes the latest iteration of the image. Either this or `iteration_id` must be specified. +- `iteration_id` (String) HCP ID of this image. Either this or `channel` must be specified. - `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts)) ### Read-Only diff --git a/internal/provider/data_source_packer_image.go b/internal/provider/data_source_packer_image.go index fddb5d5f6..d7931db82 100644 --- a/internal/provider/data_source_packer_image.go +++ b/internal/provider/data_source_packer_image.go @@ -2,9 +2,11 @@ package provider import ( "context" + "fmt" "log" "time" + packermodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/models" sharedmodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-shared/v1/models" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -33,16 +35,24 @@ func dataSourcePackerImage() *schema.Resource { Type: schema.TypeString, Required: true, }, - "iteration_id": { - Description: "HCP ID of this image.", - Type: schema.TypeString, - Required: true, - }, "region": { Description: "Region this image is stored in, if any.", Type: schema.TypeString, Required: true, }, + // Optional inputs + "iteration_id": { + Description: "HCP ID of this image. 
Either this or `channel` must be specified.", + Type: schema.TypeString, + Optional: true, + ExactlyOneOf: []string{"channel"}, + }, + "channel": { + Description: "Channel that promotes the latest iteration of the image. Either this or `iteration_id` must be specified.", + Type: schema.TypeString, + Optional: true, + ExactlyOneOf: []string{"iteration_id"}, + }, // computed outputs "organization_id": { Description: "The ID of the organization this HCP Packer registry is located in.", @@ -95,9 +105,10 @@ func dataSourcePackerImage() *schema.Resource { func dataSourcePackerImageRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { bucketName := d.Get("bucket_name").(string) - iterationID := d.Get("iteration_id").(string) cloudProvider := d.Get("cloud_provider").(string) region := d.Get("region").(string) + channelName := d.Get("channel") + iterationID := d.Get("iteration_id") client := meta.(*clients.Client) loc := &sharedmodels.HashicorpCloudLocationLocation{ @@ -111,9 +122,49 @@ func dataSourcePackerImageRead(ctx context.Context, d *schema.ResourceData, meta log.Printf("[INFO] Reading HCP Packer registry (%s) [project_id=%s, organization_id=%s, iteration_id=%s]", bucketName, loc.ProjectID, loc.OrganizationID, iterationID) - iteration, err := clients.GetIterationFromId(ctx, client, loc, bucketName, iterationID) - if err != nil { - return diag.FromErr(err) + var iteration *packermodels.HashicorpCloudPackerIteration + var err error + + if iterID, ok := iterationID.(string); ok && iterID != "" { + iteration, err = clients.GetIterationFromId( + ctx, + client, + loc, + bucketName, + iterID) + if err != nil { + return diag.FromErr(err) + } + } + + var channel *packermodels.HashicorpCloudPackerChannel + + if chanSlug, ok := channelName.(string); ok && chanSlug != "" { + channel, err = clients.GetPackerChannelBySlug( + ctx, + client, + loc, + bucketName, + chanSlug) + if err != nil { + return diag.FromErr(err) + } + } + + var diags 
diag.Diagnostics + + if channel != nil && iteration != nil { + return diag.FromErr(fmt.Errorf( + "iteration mismatch: channel %s's iteration (%s) is different from the explicitly specified iteration: %s", + channel.Slug, + channel.Iteration.ID, + iteration.ID)) + } + + // Assuming we passed the above check, the rest of the channel is not + // used after that. + if channel != nil { + iteration = channel.Iteration } found := false @@ -159,5 +210,5 @@ func dataSourcePackerImageRead(ctx context.Context, d *schema.ResourceData, meta return diag.Errorf("Unable to load image with region %s and cloud %s for iteration %s.", region, cloudProvider, iterationID) } - return nil + return diags } diff --git a/internal/provider/data_source_packer_image_test.go b/internal/provider/data_source_packer_image_test.go index e3cffcd2d..5afb7f29d 100644 --- a/internal/provider/data_source_packer_image_test.go +++ b/internal/provider/data_source_packer_image_test.go @@ -3,6 +3,7 @@ package provider import ( "fmt" "math/rand" + "regexp" "testing" "time" @@ -14,6 +15,7 @@ import ( const ( acctestImageBucket = "alpine-acctest-imagetest" acctestUbuntuImageBucket = "ubuntu-acctest-imagetest" + acctestArchImageBucket = "arch-acctest-imagetest" acctestImageChannel = "production-image-test" ) @@ -50,6 +52,25 @@ var ( region = "us-east-1" } `, acctestUbuntuImageBucket, acctestImageChannel, acctestUbuntuImageBucket) + + testAccPackerImageBothChanAndIter = fmt.Sprintf(` + data "hcp_packer_image" "arch-btw" { + bucket_name = %q + cloud_provider = "aws" + iteration_id = "234567" + channel = "chanSlug" + region = "us-east-1" + } +`, acctestArchImageBucket) + + testAccPackerImageArchProduction = fmt.Sprintf(` + data "hcp_packer_image" "arch-btw" { + bucket_name = %q + cloud_provider = "aws" + channel = %q + region = "us-east-1" + } +`, acctestArchImageBucket, acctestImageChannel) ) func TestAcc_dataSourcePackerImage(t *testing.T) { @@ -134,3 +155,62 @@ func 
TestAcc_dataSourcePackerImage_revokedIteration(t *testing.T) { }, }) } + +func TestAcc_dataSourcePackerImage_channelAndIterationIDReject(t *testing.T) { + resource.Test(t, resource.TestCase{ + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + // basically just testing that we don't pass validation here + { + PlanOnly: true, + Config: testConfig(testAccPackerImageBothChanAndIter), + ExpectError: regexp.MustCompile("Error: Invalid combination of arguments"), + }, + }, + }) +} + +func TestAcc_dataSourcePackerImage_channelAccept(t *testing.T) { + fingerprint := "acceptChannel" + resourceName := "data.hcp_packer_image.arch-btw" + var buildID string + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t, map[string]bool{"aws": false, "azure": false}) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + PlanOnly: true, + PreConfig: func() { + // CheckDestroy doesn't get called when the test fails and doesn't + // produce any tf state. In this case we destroy any existing resource + // before creating them. 
+ deleteChannel(t, acctestArchImageBucket, acctestImageChannel, false) + deleteIteration(t, acctestArchImageBucket, fingerprint, false) + deleteBucket(t, acctestArchImageBucket, false) + + upsertRegistry(t) + upsertBucket(t, acctestArchImageBucket) + upsertIteration(t, acctestArchImageBucket, fingerprint) + itID, err := getIterationIDFromFingerPrint(t, acctestArchImageBucket, fingerprint) + if err != nil { + t.Fatal(err.Error()) + } + upsertBuild(t, acctestArchImageBucket, fingerprint, itID) + createChannel(t, acctestArchImageBucket, acctestImageChannel, itID) + + buildID, err = getBuildIDFromIteration(t, acctestArchImageBucket, itID, "aws") + if err != nil { + t.Fatal(err.Error()) + } + }, + Config: testConfig(testAccPackerImageArchProduction), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, "organization_id"), + resource.TestCheckResourceAttrSet(resourceName, "project_id"), + resource.TestCheckResourceAttr(resourceName, "build_id", buildID), + ), + }, + }, + }) +}