diff --git a/docs/data-sources/packer_image.md b/docs/data-sources/packer_image.md
index 290734972..aacb4cf3c 100644
--- a/docs/data-sources/packer_image.md
+++ b/docs/data-sources/packer_image.md
@@ -2,15 +2,36 @@
 page_title: "Data Source hcp_packer_image - terraform-provider-hcp"
 subcategory: ""
 description: |-
-  The Packer Image data source iteration gets the most recent iteration (or build) of an image, given an iteration id.
+  The Packer Image data source gets an image from an iteration, which can be identified either directly by its id or by the channel that points to it.
 ---

 # hcp_packer_image (Data Source)

-The Packer Image data source iteration gets the most recent iteration (or build) of an image, given an iteration id.
+The Packer Image data source gets an image from an iteration, which can be identified either directly by its id or by the channel that points to it.

 ## Example Usage

+### Single image sourcing
+
+```terraform
+data "hcp_packer_image" "baz" {
+  bucket_name    = "hardened-ubuntu-16-04"
+  cloud_provider = "aws"
+  channel        = "production"
+  region         = "us-east-1"
+}
+
+output "packer-registry-ubuntu-east-1" {
+  value = data.hcp_packer_image.baz.cloud_image_id
+}
+```
+
+~> **Note:** The `channel` attribute in this data source may incur a billable request to HCP Packer. It is intended as a convenience when sourcing a single image. When sourcing multiple images from a single iteration, use the `hcp_packer_iteration` data source instead, so that the channel is queried only once.
+
+~> **Note:** Out of the list of images associated with the specified iteration, this data source returns only the first image whose metadata matches the given schema values. If multiple images exist in the same region, only one of them is picked. If that is a concern, consider separating your builds into different buckets.
+
+### Multiple image sourcing from a single iteration
+
 ```terraform
 data "hcp_packer_iteration" "hardened-source" {
   bucket_name = "hardened-ubuntu-16-04"
@@ -24,9 +45,20 @@ data "hcp_packer_image" "foo" {
   region         = "us-east-1"
 }

-output "packer-registry-ubuntu" {
+data "hcp_packer_image" "bar" {
+  bucket_name    = "hardened-ubuntu-16-04"
+  cloud_provider = "aws"
+  iteration_id   = data.hcp_packer_iteration.hardened-source.ulid
+  region         = "us-west-1"
+}
+
+output "packer-registry-ubuntu-east-1" {
   value = data.hcp_packer_image.foo.cloud_image_id
 }
+
+output "packer-registry-ubuntu-west-1" {
+  value = data.hcp_packer_image.bar.cloud_image_id
+}
 ```

 ~> **Note:** This data source only returns the first found image's metadata filtered by the given schema values, from the returned list of images associated with the specified iteration. Therefore, if multiple images exist in the same region, it will only pick one of them. If that's the case, you may consider separating your builds into different buckets.
@@ -38,11 +70,12 @@ output "packer-registry-ubuntu" {

 - `bucket_name` (String) The slug of the HCP Packer Registry image bucket to pull from.
 - `cloud_provider` (String) Name of the cloud provider this image is stored-in.
-- `iteration_id` (String) HCP ID of this image.
 - `region` (String) Region this image is stored in, if any.

 ### Optional

+- `channel` (String) The channel that points to the version of the image being retrieved. Either this or `iteration_id` must be specified. Note: this will incur a billable request.
+- `iteration_id` (String) The iteration from which to get the image. Either this or `channel` must be specified.
 - `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts))

 ### Read-Only
diff --git a/examples/data-sources/hcp_packer_image/data-source-alt.tf b/examples/data-sources/hcp_packer_image/data-source-alt.tf
new file mode 100644
index 000000000..b65674575
--- /dev/null
+++ b/examples/data-sources/hcp_packer_image/data-source-alt.tf
@@ -0,0 +1,10 @@
+data "hcp_packer_image" "baz" {
+  bucket_name    = "hardened-ubuntu-16-04"
+  cloud_provider = "aws"
+  channel        = "production"
+  region         = "us-east-1"
+}
+
+output "packer-registry-ubuntu-east-1" {
+  value = data.hcp_packer_image.baz.cloud_image_id
+}
diff --git a/examples/data-sources/hcp_packer_image/data-source.tf b/examples/data-sources/hcp_packer_image/data-source.tf
index 4d145e3de..d3bcf4e18 100644
--- a/examples/data-sources/hcp_packer_image/data-source.tf
+++ b/examples/data-sources/hcp_packer_image/data-source.tf
@@ -10,6 +10,17 @@ data "hcp_packer_image" "foo" {
   region         = "us-east-1"
 }

-output "packer-registry-ubuntu" {
+data "hcp_packer_image" "bar" {
+  bucket_name    = "hardened-ubuntu-16-04"
+  cloud_provider = "aws"
+  iteration_id   = data.hcp_packer_iteration.hardened-source.ulid
+  region         = "us-west-1"
+}
+
+output "packer-registry-ubuntu-east-1" {
   value = data.hcp_packer_image.foo.cloud_image_id
 }
+
+output "packer-registry-ubuntu-west-1" {
+  value = data.hcp_packer_image.bar.cloud_image_id
+}
diff --git a/internal/provider/data_source_packer_image.go b/internal/provider/data_source_packer_image.go
index fddb5d5f6..1af6031aa 100644
--- a/internal/provider/data_source_packer_image.go
+++ b/internal/provider/data_source_packer_image.go
@@ -5,6 +5,7 @@ import (
 	"log"
 	"time"

+	packermodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/models"
 	sharedmodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-shared/v1/models"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
@@ -15,7 +16,7 @@ var defaultPackerTimeout = time.Minute

 func dataSourcePackerImage() *schema.Resource {
 	return &schema.Resource{
-		Description: "The Packer Image data source iteration gets the most recent iteration (or build) of an image, given an iteration id.",
+		Description: "The Packer Image data source gets an image from an iteration, which can be identified either directly by its id or by the channel that points to it.",
 		ReadContext: dataSourcePackerImageRead,
 		Timeouts: &schema.ResourceTimeout{
 			Default: &defaultPackerTimeout,
@@ -33,16 +34,25 @@ func dataSourcePackerImage() *schema.Resource {
 				Type:        schema.TypeString,
 				Required:    true,
 			},
-			"iteration_id": {
-				Description: "HCP ID of this image.",
-				Type:        schema.TypeString,
-				Required:    true,
-			},
 			"region": {
 				Description: "Region this image is stored in, if any.",
 				Type:        schema.TypeString,
 				Required:    true,
 			},
+			// Optional inputs
+			"iteration_id": {
+				Description:  "The iteration from which to get the image. Either this or `channel` must be specified.",
+				Type:         schema.TypeString,
+				Optional:     true,
+				Computed:     true,
+				ExactlyOneOf: []string{"iteration_id", "channel"},
+			},
+			"channel": {
+				Description:  "The channel that points to the version of the image being retrieved. Either this or `iteration_id` must be specified. Note: this will incur a billable request.",
+				Type:         schema.TypeString,
+				Optional:     true,
+				ExactlyOneOf: []string{"iteration_id", "channel"},
+			},
 			// computed outputs
 			"organization_id": {
 				Description: "The ID of the organization this HCP Packer registry is located in.",
@@ -95,9 +105,10 @@ func dataSourcePackerImage() *schema.Resource {

 func dataSourcePackerImageRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
 	bucketName := d.Get("bucket_name").(string)
-	iterationID := d.Get("iteration_id").(string)
 	cloudProvider := d.Get("cloud_provider").(string)
 	region := d.Get("region").(string)
+	channelSlug := d.Get("channel").(string)
+	iterationID := d.Get("iteration_id").(string)

 	client := meta.(*clients.Client)
 	loc := &sharedmodels.HashicorpCloudLocationLocation{
@@ -109,11 +120,41 @@ func dataSourcePackerImageRead(ctx context.Context, d *schema.ResourceData, meta
 		return diag.FromErr(err)
 	}

-	log.Printf("[INFO] Reading HCP Packer registry (%s) [project_id=%s, organization_id=%s, iteration_id=%s]", bucketName, loc.ProjectID, loc.OrganizationID, iterationID)
+	log.Printf("[INFO] Reading HCP Packer registry (%s) [project_id=%s, organization_id=%s, channel=%s/iteration_id=%s]", bucketName, loc.ProjectID, loc.OrganizationID, channelSlug, iterationID)

-	iteration, err := clients.GetIterationFromId(ctx, client, loc, bucketName, iterationID)
-	if err != nil {
-		return diag.FromErr(err)
+	var iteration *packermodels.HashicorpCloudPackerIteration
+	var err error
+
+	if iterationID != "" {
+		iteration, err = clients.GetIterationFromId(
+			ctx,
+			client,
+			loc,
+			bucketName,
+			iterationID)
+		if err != nil {
+			return diag.FromErr(err)
+		}
+	}
+
+	var channel *packermodels.HashicorpCloudPackerChannel
+
+	if channelSlug != "" {
+		channel, err = clients.GetPackerChannelBySlug(
+			ctx,
+			client,
+			loc,
+			bucketName,
+			channelSlug)
+		if err != nil {
+			return diag.FromErr(err)
+		}
+	}
+
+	// If the config specified a channel, use the iteration it currently
+	// points to; nothing else from the channel is used past this point.
+	if channel != nil {
+		iteration = channel.Iteration
 	}

 	found := false
diff --git a/internal/provider/data_source_packer_image_test.go b/internal/provider/data_source_packer_image_test.go
index e3cffcd2d..1fd6f976e 100644
--- a/internal/provider/data_source_packer_image_test.go
+++ b/internal/provider/data_source_packer_image_test.go
@@ -3,6 +3,7 @@ package provider
 import (
 	"fmt"
 	"math/rand"
+	"regexp"
 	"testing"
 	"time"

@@ -14,6 +15,7 @@ import (
 const (
 	acctestImageBucket       = "alpine-acctest-imagetest"
 	acctestUbuntuImageBucket = "ubuntu-acctest-imagetest"
+	acctestArchImageBucket   = "arch-acctest-imagetest"
 	acctestImageChannel      = "production-image-test"
 )

@@ -50,6 +52,40 @@ var (
 		region         = "us-east-1"
 	}
 `, acctestUbuntuImageBucket, acctestImageChannel, acctestUbuntuImageBucket)
+
+	testAccPackerImageBothChanAndIter = fmt.Sprintf(`
+	data "hcp_packer_image" "arch-btw" {
+		bucket_name    = %q
+		cloud_provider = "aws"
+		iteration_id   = "234567"
+		channel        = "chanSlug"
+		region         = "us-east-1"
+	}
+`, acctestArchImageBucket)
+
+	testAccPackerImageBothChanAndIterRef = fmt.Sprintf(`
+	data "hcp_packer_iteration" "arch-imagetest" {
+		bucket_name = %q
+		channel     = %q
+	}
+
+	data "hcp_packer_image" "arch-btw" {
+		bucket_name    = %q
+		cloud_provider = "aws"
+		iteration_id   = data.hcp_packer_iteration.arch-imagetest.id
+		channel        = %q
+		region         = "us-east-1"
+	}
+`, acctestArchImageBucket, acctestImageChannel, acctestArchImageBucket, acctestImageChannel)
+
+	testAccPackerImageArchProduction = fmt.Sprintf(`
+	data "hcp_packer_image" "arch-btw" {
+		bucket_name    = %q
+		cloud_provider = "aws"
+		channel        = %q
+		region         = "us-east-1"
+	}
+`, acctestArchImageBucket, acctestImageChannel)
 )

 func TestAcc_dataSourcePackerImage(t *testing.T) {
@@ -134,3 +170,78 @@ func TestAcc_dataSourcePackerImage_revokedIteration(t *testing.T) {
 		},
 	})
 }
+
+func TestAcc_dataSourcePackerImage_channelAndIterationIDReject(t *testing.T) {
+	fingerprint := "rejectIterationAndChannel"
+	configs := []string{
+		testAccPackerImageBothChanAndIter,
+		testAccPackerImageBothChanAndIterRef,
+	}
+
+	for _, cfg := range configs {
+		resource.Test(t, resource.TestCase{
+			PreCheck:          func() { testAccPreCheck(t, map[string]bool{"aws": false, "azure": false}) },
+			ProviderFactories: providerFactories,
+			Steps: []resource.TestStep{
+				// Plan only: the config must fail schema validation before anything is read
+				{
+					PlanOnly: true,
+					PreConfig: func() {
+						deleteChannel(t, acctestArchImageBucket, acctestImageChannel, false)
+						deleteIteration(t, acctestArchImageBucket, fingerprint, false)
+						deleteBucket(t, acctestArchImageBucket, false)

+						upsertRegistry(t)
+						upsertBucket(t, acctestArchImageBucket)
+						upsertIteration(t, acctestArchImageBucket, fingerprint)
+						itID, err := getIterationIDFromFingerPrint(t, acctestArchImageBucket, fingerprint)
+						if err != nil {
+							t.Fatal(err.Error())
+						}
+						upsertBuild(t, acctestArchImageBucket, fingerprint, itID)
+						createChannel(t, acctestArchImageBucket, acctestImageChannel, itID)
+					},
+					Config:      testConfig(cfg),
+					ExpectError: regexp.MustCompile("Error: Invalid combination of arguments"),
+				},
+			},
+		})
+	}
+}
+
+func TestAcc_dataSourcePackerImage_channelAccept(t *testing.T) {
+	fingerprint := "acceptChannel"
+	resourceName := "data.hcp_packer_image.arch-btw"
+	resource.Test(t, resource.TestCase{
+		PreCheck:          func() { testAccPreCheck(t, map[string]bool{"aws": false, "azure": false}) },
+		ProviderFactories: providerFactories,
+		CheckDestroy: func(*terraform.State) error {
+			deleteChannel(t, acctestArchImageBucket, acctestImageChannel, false)
+			deleteIteration(t, acctestArchImageBucket, fingerprint, false)
+			deleteBucket(t, acctestArchImageBucket, false)
+			return nil
+		},
+		Steps: []resource.TestStep{
+			{
+				PreConfig: func() {
+					upsertRegistry(t)
+					upsertBucket(t, acctestArchImageBucket)
+					upsertIteration(t, acctestArchImageBucket, fingerprint)
+					itID, err := getIterationIDFromFingerPrint(t, acctestArchImageBucket, fingerprint)
+					if err != nil {
+						t.Fatal(err.Error())
+					}
+					upsertBuild(t, acctestArchImageBucket, fingerprint, itID)
+					createChannel(t, acctestArchImageBucket, acctestImageChannel, itID)
+				},
+				Config: testConfig(testAccPackerImageArchProduction),
+				Check: resource.ComposeTestCheckFunc(
+					// build_id is only known at runtime, and the iteration is
+					// recreated on every test run, so all we can verify here
+					// is that the attribute is set
+					resource.TestCheckResourceAttrSet(resourceName, "build_id"),
+				),
+			},
+		},
+	})
+}
diff --git a/templates/data-sources/packer_image.md.tmpl b/templates/data-sources/packer_image.md.tmpl
index 664658a5a..377d00f64 100644
--- a/templates/data-sources/packer_image.md.tmpl
+++ b/templates/data-sources/packer_image.md.tmpl
@@ -11,6 +11,16 @@ description: |-

 ## Example Usage

+### Single image sourcing
+
+{{ tffile "examples/data-sources/hcp_packer_image/data-source-alt.tf" }}
+
+~> **Note:** The `channel` attribute in this data source may incur a billable request to HCP Packer. It is intended as a convenience when sourcing a single image. When sourcing multiple images from a single iteration, use the `hcp_packer_iteration` data source instead, so that the channel is queried only once.
+
+~> **Note:** Out of the list of images associated with the specified iteration, this data source returns only the first image whose metadata matches the given schema values. If multiple images exist in the same region, only one of them is picked. If that is a concern, consider separating your builds into different buckets.
+
+### Multiple image sourcing from a single iteration
+
 {{ tffile "examples/data-sources/hcp_packer_image/data-source.tf" }}

 ~> **Note:** This data source only returns the first found image's metadata filtered by the given schema values, from the returned list of images associated with the specified iteration. Therefore, if multiple images exist in the same region, it will only pick one of them. If that's the case, you may consider separating your builds into different buckets.
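For context beyond the diff itself, here is a minimal sketch of how the resolved image id is typically consumed downstream. The `hcp_packer_image` block mirrors the examples added above; the `aws_instance` resource and its `ami`/`instance_type` arguments belong to the standard Terraform AWS provider and are shown only for illustration, not as part of this changeset.

```terraform
# Resolve whatever image the "production" channel currently points to.
# This is a single (billable) channel query against HCP Packer.
data "hcp_packer_image" "prod" {
  bucket_name    = "hardened-ubuntu-16-04"
  cloud_provider = "aws"
  channel        = "production"
  region         = "us-east-1"
}

# Standard AWS provider resource: the resolved image id feeds straight into `ami`.
resource "aws_instance" "app" {
  ami           = data.hcp_packer_image.prod.cloud_image_id
  instance_type = "t3.micro"
}
```

Because every `hcp_packer_image` lookup that sets `channel` performs its own channel query, configurations that fan out across several regions are better served by resolving the channel once with `hcp_packer_iteration` and passing `iteration_id`, as the multi-image example in this diff does.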