From 8c66aade21267ffef49a5ec00ec52cf0298d44fc Mon Sep 17 00:00:00 2001
From: HashiCorp Cloud Services <>
Date: Thu, 29 Jun 2023 14:48:39 +0000
Subject: [PATCH] Sync with public Provider
---
CHANGELOG.md | 14 +
docs/data-sources/packer_image.md | 2 +-
docs/data-sources/packer_image_iteration.md | 2 +-
docs/data-sources/packer_iteration.md | 2 +-
docs/guides/packer-channel-management.md | 51 ++
docs/index.md | 2 +-
docs/resources/packer_channel.md | 61 +-
docs/resources/packer_channel_assignment.md | 82 +++
.../assign_latest.tf | 16 +
.../guides/packer_channel_management/main.tf | 14 +
examples/provider/provider.tf | 2 +-
.../hcp_packer_channel/resource_assignment.tf | 23 -
.../resource_using_latest_channel.tf | 12 -
.../hcp_packer_channel_assignment/import.sh | 6 +
.../hcp_packer_channel_assignment/resource.tf | 20 +
go.mod | 2 +-
go.sum | 4 +-
golangci-config.yml | 11 +-
internal/clients/packer.go | 30 +-
internal/provider/data_source_packer_image.go | 2 +-
.../data_source_packer_image_iteration.go | 2 +-
...data_source_packer_image_iteration_test.go | 425 --------------
.../provider/data_source_packer_iteration.go | 2 +-
internal/provider/provider.go | 119 +---
internal/provider/resource_packer_channel.go | 175 +++---
.../resource_packer_channel_assignment.go | 462 +++++++++++++++
...resource_packer_channel_assignment_test.go | 549 ++++++++++++++++++
internal/provider/statuspage.go | 127 ++++
internal/provider/test_helpers.go | 35 --
internal/provider/test_helpers_packer_test.go | 480 +++++++++++++++
internal/provider/test_helpers_test.go | 158 +++++
.../packer_image_iteration.md.tmpl | 2 +-
.../guides/packer-channel-management.md.tmpl | 19 +
templates/resources/packer_channel.md.tmpl | 30 -
34 files changed, 2125 insertions(+), 818 deletions(-)
create mode 100644 docs/guides/packer-channel-management.md
create mode 100644 docs/resources/packer_channel_assignment.md
create mode 100644 examples/guides/packer_channel_management/assign_latest.tf
create mode 100644 examples/guides/packer_channel_management/main.tf
delete mode 100644 examples/resources/hcp_packer_channel/resource_assignment.tf
delete mode 100644 examples/resources/hcp_packer_channel/resource_using_latest_channel.tf
create mode 100644 examples/resources/hcp_packer_channel_assignment/import.sh
create mode 100644 examples/resources/hcp_packer_channel_assignment/resource.tf
create mode 100644 internal/provider/resource_packer_channel_assignment.go
create mode 100644 internal/provider/resource_packer_channel_assignment_test.go
create mode 100644 internal/provider/statuspage.go
delete mode 100644 internal/provider/test_helpers.go
create mode 100644 internal/provider/test_helpers_packer_test.go
create mode 100644 internal/provider/test_helpers_test.go
create mode 100644 templates/guides/packer-channel-management.md.tmpl
delete mode 100644 templates/resources/packer_channel.md.tmpl
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3cc0f5332..d901a3d7b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,17 @@
+## v0.63.0 (June 29, 2023)
+
+FEATURES:
+
+* Adds `hcp_packer_channel_assignment` resource. [[GH-531](https://github.com/hashicorp/terraform-provider-hcp/pull/531)]
+
+IMPROVEMENTS:
+
+* Adds read-only `restricted` field to `hcp_packer_channel` resource. [[GH-531](https://github.com/hashicorp/terraform-provider-hcp/pull/531)]
+
+DEPRECATIONS:
+
+* Setting the `iteration` block on `hcp_packer_channel` is now deprecated in
+favor of `hcp_packer_channel_assignment`. [[GH-531](https://github.com/hashicorp/terraform-provider-hcp/pull/531)]
## v0.62.0 (June 23, 2023)
IMPROVEMENTS:
diff --git a/docs/data-sources/packer_image.md b/docs/data-sources/packer_image.md
index be99f7739..6bb1a9ded 100644
--- a/docs/data-sources/packer_image.md
+++ b/docs/data-sources/packer_image.md
@@ -66,7 +66,7 @@ output "packer-registry-ubuntu-west-1" {
### Required
-- `bucket_name` (String) The slug of the HCP Packer Registry image bucket to pull from.
+- `bucket_name` (String) The slug of the HCP Packer Registry bucket to pull from.
- `cloud_provider` (String) Name of the cloud provider this image is stored in.
- `region` (String) Region this image is stored in, if any.
diff --git a/docs/data-sources/packer_image_iteration.md b/docs/data-sources/packer_image_iteration.md
index 9614fa6d6..d6f520a1d 100644
--- a/docs/data-sources/packer_image_iteration.md
+++ b/docs/data-sources/packer_image_iteration.md
@@ -22,7 +22,7 @@ data "hcp_packer_image_iteration" "alpine" {
### Required
-- `bucket_name` (String) The slug of the HCP Packer Registry image bucket to pull from.
+- `bucket_name` (String) The slug of the HCP Packer Registry bucket to pull from.
- `channel` (String) The channel that points to the version of the image you want.
### Optional
diff --git a/docs/data-sources/packer_iteration.md b/docs/data-sources/packer_iteration.md
index a61563c13..ef8bf0371 100644
--- a/docs/data-sources/packer_iteration.md
+++ b/docs/data-sources/packer_iteration.md
@@ -24,7 +24,7 @@ data "hcp_packer_iteration" "hardened-source" {
### Required
-- `bucket_name` (String) The slug of the HCP Packer Registry image bucket to pull from.
+- `bucket_name` (String) The slug of the HCP Packer Registry bucket to pull from.
- `channel` (String) The channel that points to the version of the image you want.
### Optional
diff --git a/docs/guides/packer-channel-management.md b/docs/guides/packer-channel-management.md
new file mode 100644
index 000000000..5a45109b1
--- /dev/null
+++ b/docs/guides/packer-channel-management.md
@@ -0,0 +1,51 @@
+---
+subcategory: ""
+page_title: "Advanced Packer Channel Management - HCP Provider"
+description: |-
+  A guide to integrating HCP Packer resources and data sources for more advanced channel management.
+---
+
+# Advanced Packer Channel Management
+
+You can integrate multiple HCP Packer resources and data sources to perform advanced channel management tasks.
+
+## Setting the channel assignment on a Terraform-managed channel
+
+```terraform
+resource "hcp_packer_channel" "advanced" {
+ name = "advanced"
+ bucket_name = "alpine"
+}
+
+resource "hcp_packer_channel_assignment" "advanced" {
+ bucket_name = hcp_packer_channel.advanced.bucket_name
+ channel_name = hcp_packer_channel.advanced.name
+
+ # Exactly one of version, id, or fingerprint must be set:
+ iteration_version = 12
+ # iteration_id = "01H1SF9NWAK8AP25PAWDBGZ1YD"
+ # iteration_fingerprint = "01H1ZMW0Q2W6FT4FK27FQJCFG7"
+}
+```
+
+## Setting the channel assignment to the latest complete iteration
+
+```terraform
+data "hcp_packer_iteration" "alpine_latest" {
+ bucket_name = "alpine"
+ channel = "latest"
+}
+
+resource "hcp_packer_channel" "alpine_advanced" {
+ name = "advanced"
+ bucket_name = data.hcp_packer_iteration.alpine_latest.bucket_name
+}
+
+resource "hcp_packer_channel_assignment" "alpine_advanced" {
+ bucket_name = hcp_packer_channel.alpine_advanced.bucket_name
+ channel_name = hcp_packer_channel.alpine_advanced.name
+
+ iteration_version = data.hcp_packer_iteration.alpine_latest.incremental_version
+}
+```
+
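+## Referencing the managed channel from other resources
+
+The assigned iteration can then be consumed through the `hcp_packer_image` data source, for example to retrieve a machine image ID. This is an illustrative sketch: the `channel` argument is assumed from the data source's optional inputs, the `cloud_provider` and `region` values are placeholders, and `cloud_image_id` is assumed to be the exported image identifier.
+
+```terraform
+data "hcp_packer_image" "alpine_advanced" {
+  bucket_name    = "alpine"
+  channel        = hcp_packer_channel.advanced.name
+  cloud_provider = "aws"
+  region         = "us-east-1"
+}
+
+output "alpine_advanced_image_id" {
+  value = data.hcp_packer_image.alpine_advanced.cloud_image_id
+}
+```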
diff --git a/docs/index.md b/docs/index.md
index 092f5b34e..d311b5bf6 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -38,7 +38,7 @@ terraform {
required_providers {
hcp = {
source = "hashicorp/hcp"
- version = "~> 0.62.0"
+ version = "~> 0.63.0"
}
}
}
diff --git a/docs/resources/packer_channel.md b/docs/resources/packer_channel.md
index 9da84b5f7..89106d4bf 100644
--- a/docs/resources/packer_channel.md
+++ b/docs/resources/packer_channel.md
@@ -1,17 +1,17 @@
---
-page_title: "Resource hcp_packer_channel - terraform-provider-hcp"
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "hcp_packer_channel Resource - terraform-provider-hcp"
subcategory: ""
description: |-
- The Packer Channel resource allows you to manage image bucket channels within an active HCP Packer Registry.
+ The Packer Channel resource allows you to manage a bucket channel within an active HCP Packer Registry.
---
# hcp_packer_channel (Resource)
-The Packer Channel resource allows you to manage image bucket channels within an active HCP Packer Registry.
+The Packer Channel resource allows you to manage a bucket channel within an active HCP Packer Registry.
## Example Usage
-To create a channel.
```terraform
resource "hcp_packer_channel" "staging" {
name = "staging"
@@ -19,62 +19,18 @@ resource "hcp_packer_channel" "staging" {
}
```
-To create a channel with iteration assignment managed by Terraform.
-```terraform
-resource "hcp_packer_channel" "staging" {
- name = "staging"
- bucket_name = "alpine"
- iteration {
- # Exactly one of `id`, `fingerprint` or `incremental_version` must be passed
- id = "01H1SF9NWAK8AP25PAWDBGZ1YD"
- # fingerprint = "01H1ZMW0Q2W6FT4FK27FQJCFG7"
- # incremental_version = 1
- }
-}
-
-# To configure a channel to have no assigned iteration, use a "zero value".
-# The zero value for `id` and `fingerprint` is `""`; for `incremental_version`, it is `0`
-resource "hcp_packer_channel" "staging" {
- name = "staging"
- bucket_name = "alpine"
- iteration {
- # Exactly one of `id`, `fingerprint` or `incremental_version` must be passed
- id = ""
- # fingerprint = ""
- # incremental_version = 0
- }
-}
-```
-
-Using the latest channel to create a new channel with the latest complete iteration assigned.
-```terraform
-data "hcp_packer_image_iteration" "latest" {
- bucket_name = "alpine"
- channel = "latest"
-}
-
-resource "hcp_packer_channel" "staging" {
- name = staging
- bucket_name = alpine
- iteration {
- id = data.hcp_packer_image_iteration.latest.id
- }
-}
-```
-
-
## Schema
### Required
-- `bucket_name` (String) The slug of the HCP Packer Registry image bucket where the channel should be created in.
+- `bucket_name` (String) The slug of the HCP Packer Registry bucket where the channel should be created.
- `name` (String) The name of the channel being managed.
### Optional
-- `iteration` (Block List, Max: 1) The iteration assigned to the channel. (see [below for nested schema](#nestedblock--iteration))
-- `project_id` (String) The ID of the HCP project where this channel is located in.
+- `iteration` (Block List, Max: 1, Deprecated) The iteration assigned to the channel. This block is deprecated. Please use `hcp_packer_channel_assignment` instead. (see [below for nested schema](#nestedblock--iteration))
+- `project_id` (String) The ID of the HCP project where this channel is located.
If not specified, the project specified in the HCP Provider config block will be used, if configured.
If a project is not configured in the HCP Provider config block, the oldest project in the organization will be used.
- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts))
@@ -84,7 +40,8 @@ If a project is not configured in the HCP Provider config block, the oldest proj
- `author_id` (String) The author of this channel.
- `created_at` (String) The creation time of this channel.
- `id` (String) The ID of this resource.
-- `organization_id` (String) The ID of the HCP organization where this channel is located in.
+- `organization_id` (String) The ID of the HCP organization where this channel is located.
+- `restricted` (Boolean) If true, the channel is only visible to users with permission to create and manage it. Otherwise the channel is visible to every member of the organization.
- `updated_at` (String) The time this channel was last updated.
diff --git a/docs/resources/packer_channel_assignment.md b/docs/resources/packer_channel_assignment.md
new file mode 100644
index 000000000..ca2a33461
--- /dev/null
+++ b/docs/resources/packer_channel_assignment.md
@@ -0,0 +1,82 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "hcp_packer_channel_assignment Resource - terraform-provider-hcp"
+subcategory: ""
+description: |-
+ The Packer Channel Assignment resource allows you to manage the iteration assigned to a bucket channel in an active HCP Packer Registry.
+---
+
+# hcp_packer_channel_assignment (Resource)
+
+The Packer Channel Assignment resource allows you to manage the iteration assigned to a bucket channel in an active HCP Packer Registry.
+
+## Example Usage
+
+```terraform
+resource "hcp_packer_channel_assignment" "staging" {
+ bucket_name = "alpine"
+ channel_name = "staging"
+
+ # Exactly one of version, id, or fingerprint must be set:
+ iteration_version = 12
+ # iteration_id = "01H1SF9NWAK8AP25PAWDBGZ1YD"
+ # iteration_fingerprint = "01H1ZMW0Q2W6FT4FK27FQJCFG7"
+}
+
+# To set the channel to have no assignment, set exactly one of the iteration attributes to its zero value.
+# The two string-typed iteration attributes, id and fingerprint, use "none" as their zero value.
+resource "hcp_packer_channel_assignment" "staging" {
+ bucket_name = "alpine"
+ channel_name = "staging"
+
+ iteration_version = 0
+ # iteration_id = "none"
+ # iteration_fingerprint = "none"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `bucket_name` (String) The slug of the HCP Packer Registry bucket where the channel is located.
+- `channel_name` (String) The name of the HCP Packer channel being managed.
+
+### Optional
+
+- `iteration_fingerprint` (String) The fingerprint of the iteration assigned to the channel.
+- `iteration_id` (String) The ID of the iteration assigned to the channel.
+- `iteration_version` (Number) The incremental version of the iteration assigned to the channel.
+- `project_id` (String) The ID of the HCP project where the channel is located.
+If not specified, the project specified in the HCP Provider config block will be used, if configured.
+If a project is not configured in the HCP Provider config block, the oldest project in the organization will be used.
+- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts))
+
+### Read-Only
+
+- `id` (String) The ID of this resource.
+- `organization_id` (String) The ID of the HCP organization where this channel is located. Always the same as the organization of the associated channel.
+
+
+### Nested Schema for `timeouts`
+
+Optional:
+
+- `create` (String)
+- `default` (String)
+- `delete` (String)
+- `update` (String)
+
+## Import
+
+Import is supported using the following syntax:
+
+```shell
+# Using an explicit project ID, the import ID is:
+# {project_id}:{bucket_name}:{channel_name}
+terraform import hcp_packer_channel_assignment.staging f709ec73-55d4-46d8-897d-816ebba28778:alpine:staging
+# Using the provider-default project ID, the import ID is:
+# {bucket_name}:{channel_name}
+terraform import hcp_packer_channel_assignment.staging alpine:staging
+```
diff --git a/examples/guides/packer_channel_management/assign_latest.tf b/examples/guides/packer_channel_management/assign_latest.tf
new file mode 100644
index 000000000..6f9e7d13a
--- /dev/null
+++ b/examples/guides/packer_channel_management/assign_latest.tf
@@ -0,0 +1,16 @@
+data "hcp_packer_iteration" "alpine_latest" {
+ bucket_name = "alpine"
+ channel = "latest"
+}
+
+resource "hcp_packer_channel" "alpine_advanced" {
+ name = "advanced"
+ bucket_name = data.hcp_packer_iteration.alpine_latest.bucket_name
+}
+
+resource "hcp_packer_channel_assignment" "alpine_advanced" {
+ bucket_name = hcp_packer_channel.alpine_advanced.bucket_name
+ channel_name = hcp_packer_channel.alpine_advanced.name
+
+ iteration_version = data.hcp_packer_iteration.alpine_latest.incremental_version
+}
diff --git a/examples/guides/packer_channel_management/main.tf b/examples/guides/packer_channel_management/main.tf
new file mode 100644
index 000000000..d665727e3
--- /dev/null
+++ b/examples/guides/packer_channel_management/main.tf
@@ -0,0 +1,14 @@
+resource "hcp_packer_channel" "advanced" {
+ name = "advanced"
+ bucket_name = "alpine"
+}
+
+resource "hcp_packer_channel_assignment" "advanced" {
+ bucket_name = hcp_packer_channel.advanced.bucket_name
+ channel_name = hcp_packer_channel.advanced.name
+
+ # Exactly one of version, id, or fingerprint must be set:
+ iteration_version = 12
+ # iteration_id = "01H1SF9NWAK8AP25PAWDBGZ1YD"
+ # iteration_fingerprint = "01H1ZMW0Q2W6FT4FK27FQJCFG7"
+}
diff --git a/examples/provider/provider.tf b/examples/provider/provider.tf
index b3158bb71..16bb3d160 100644
--- a/examples/provider/provider.tf
+++ b/examples/provider/provider.tf
@@ -3,7 +3,7 @@ terraform {
required_providers {
hcp = {
source = "hashicorp/hcp"
- version = "~> 0.62.0"
+ version = "~> 0.63.0"
}
}
}
diff --git a/examples/resources/hcp_packer_channel/resource_assignment.tf b/examples/resources/hcp_packer_channel/resource_assignment.tf
deleted file mode 100644
index 783993d36..000000000
--- a/examples/resources/hcp_packer_channel/resource_assignment.tf
+++ /dev/null
@@ -1,23 +0,0 @@
-resource "hcp_packer_channel" "staging" {
- name = "staging"
- bucket_name = "alpine"
- iteration {
- # Exactly one of `id`, `fingerprint` or `incremental_version` must be passed
- id = "01H1SF9NWAK8AP25PAWDBGZ1YD"
- # fingerprint = "01H1ZMW0Q2W6FT4FK27FQJCFG7"
- # incremental_version = 1
- }
-}
-
-# To configure a channel to have no assigned iteration, use a "zero value".
-# The zero value for `id` and `fingerprint` is `""`; for `incremental_version`, it is `0`
-resource "hcp_packer_channel" "staging" {
- name = "staging"
- bucket_name = "alpine"
- iteration {
- # Exactly one of `id`, `fingerprint` or `incremental_version` must be passed
- id = ""
- # fingerprint = ""
- # incremental_version = 0
- }
-}
\ No newline at end of file
diff --git a/examples/resources/hcp_packer_channel/resource_using_latest_channel.tf b/examples/resources/hcp_packer_channel/resource_using_latest_channel.tf
deleted file mode 100644
index ca6a7ad62..000000000
--- a/examples/resources/hcp_packer_channel/resource_using_latest_channel.tf
+++ /dev/null
@@ -1,12 +0,0 @@
-data "hcp_packer_image_iteration" "latest" {
- bucket_name = "alpine"
- channel = "latest"
-}
-
-resource "hcp_packer_channel" "staging" {
- name = staging
- bucket_name = alpine
- iteration {
- id = data.hcp_packer_image_iteration.latest.id
- }
-}
diff --git a/examples/resources/hcp_packer_channel_assignment/import.sh b/examples/resources/hcp_packer_channel_assignment/import.sh
new file mode 100644
index 000000000..9fc12e110
--- /dev/null
+++ b/examples/resources/hcp_packer_channel_assignment/import.sh
@@ -0,0 +1,6 @@
+# Using an explicit project ID, the import ID is:
+# {project_id}:{bucket_name}:{channel_name}
+terraform import hcp_packer_channel_assignment.staging f709ec73-55d4-46d8-897d-816ebba28778:alpine:staging
+# Using the provider-default project ID, the import ID is:
+# {bucket_name}:{channel_name}
+terraform import hcp_packer_channel_assignment.staging alpine:staging
diff --git a/examples/resources/hcp_packer_channel_assignment/resource.tf b/examples/resources/hcp_packer_channel_assignment/resource.tf
new file mode 100644
index 000000000..540bb0b2f
--- /dev/null
+++ b/examples/resources/hcp_packer_channel_assignment/resource.tf
@@ -0,0 +1,20 @@
+resource "hcp_packer_channel_assignment" "staging" {
+ bucket_name = "alpine"
+ channel_name = "staging"
+
+ # Exactly one of version, id, or fingerprint must be set:
+ iteration_version = 12
+ # iteration_id = "01H1SF9NWAK8AP25PAWDBGZ1YD"
+ # iteration_fingerprint = "01H1ZMW0Q2W6FT4FK27FQJCFG7"
+}
+
+# To set the channel to have no assignment, set exactly one of the iteration attributes to its zero value.
+# The two string-typed iteration attributes, id and fingerprint, use "none" as their zero value.
+resource "hcp_packer_channel_assignment" "staging" {
+ bucket_name = "alpine"
+ channel_name = "staging"
+
+ iteration_version = 0
+ # iteration_id = "none"
+ # iteration_fingerprint = "none"
+}
diff --git a/go.mod b/go.mod
index 469debae6..6a0b81caf 100644
--- a/go.mod
+++ b/go.mod
@@ -82,7 +82,7 @@ require (
github.com/vmihailenco/msgpack/v4 v4.3.12 // indirect
github.com/vmihailenco/tagparser v0.1.1 // indirect
github.com/zclconf/go-cty v1.12.1 // indirect
- go.mongodb.org/mongo-driver v1.11.0 // indirect
+ go.mongodb.org/mongo-driver v1.11.7 // indirect
go.opentelemetry.io/otel v1.11.1 // indirect
go.opentelemetry.io/otel/trace v1.11.1 // indirect
golang.org/x/crypto v0.6.0 // indirect
diff --git a/go.sum b/go.sum
index 40854c1a9..538de82fe 100644
--- a/go.sum
+++ b/go.sum
@@ -337,8 +337,8 @@ github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRK
go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg=
go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng=
go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAVEYRhCXrA8=
-go.mongodb.org/mongo-driver v1.11.0 h1:FZKhBSTydeuffHj9CBjXlR8vQLee1cQyTWYPA6/tqiE=
-go.mongodb.org/mongo-driver v1.11.0/go.mod h1:s7p5vEtfbeR1gYi6pnj3c3/urpbLv2T5Sfd6Rp2HBB8=
+go.mongodb.org/mongo-driver v1.11.7 h1:LIwYxASDLGUg/8wOhgOOZhX8tQa/9tgZPgzZoVqJvcs=
+go.mongodb.org/mongo-driver v1.11.7/go.mod h1:G9TgswdsWjX4tmDA5zfs2+6AEPpYJwqblyjsfuh8oXY=
go.opentelemetry.io/otel v1.11.1 h1:4WLLAmcfkmDk2ukNXJyq3/kiz/3UzCaYq6PskJsaou4=
go.opentelemetry.io/otel v1.11.1/go.mod h1:1nNhXBbWSD0nsL38H6btgnFN2k4i0sNLHNNMZMSbUGE=
go.opentelemetry.io/otel/sdk v1.11.1 h1:F7KmQgoHljhUuJyA+9BiU+EkJfyX5nVVF4wyzWZpKxs=
diff --git a/golangci-config.yml b/golangci-config.yml
index 8ea326a24..4cd36fe8c 100644
--- a/golangci-config.yml
+++ b/golangci-config.yml
@@ -23,11 +23,12 @@ linters:
linters-settings:
depguard:
- list-type: blacklist
- packages:
- - github.com/gogo/status
- - github.com/gogo/codes
- - github.com/gogo/protobuf
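+    # depguard v2 configuration format: denied packages are now declared per rule under rules.<name>.deny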
+ rules:
+ main:
+ deny:
+ - pkg: "github.com/gogo/status"
+ - pkg: "github.com/gogo/codes"
+ - pkg: "github.com/gogo/protobuf"
misspell:
locale: US
diff --git a/internal/clients/packer.go b/internal/clients/packer.go
index 18ffe5647..1664f6618 100644
--- a/internal/clients/packer.go
+++ b/internal/clients/packer.go
@@ -12,14 +12,14 @@ import (
sharedmodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-shared/v1/models"
)
-// GetPackerChannelBySlug queries the HCP Packer registry for the iteration
+// GetPackerChannelBySlug queries the HCP Packer registry for the channel
// associated with the given channel name.
func GetPackerChannelBySlug(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation,
- bucketName string, channel string) (*packermodels.HashicorpCloudPackerChannel, error) {
+ bucketName string, channelName string) (*packermodels.HashicorpCloudPackerChannel, error) {
getParams := packer_service.NewPackerServiceGetChannelParams()
getParams.BucketSlug = bucketName
- getParams.Slug = channel
+ getParams.Slug = channelName
getParams.LocationOrganizationID = loc.OrganizationID
getParams.LocationProjectID = loc.ProjectID
@@ -31,6 +31,24 @@ func GetPackerChannelBySlug(ctx context.Context, client *Client, loc *sharedmode
return getResp.Payload.Channel, nil
}
+// GetPackerChannelBySlugFromList queries the HCP Packer registry for the
+// channel associated with the given channel name, using ListBucketChannels
+func GetPackerChannelBySlugFromList(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation,
+ bucketName string, channelName string) (*packermodels.HashicorpCloudPackerChannel, error) {
+ resp, err := ListBucketChannels(ctx, client, loc, bucketName)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, channel := range resp.Channels {
+ if channel.Slug == channelName {
+ return channel, nil
+ }
+ }
+
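+	// A nil channel with a nil error means no channel with the given name was
+	// found; callers decide how to handle that (the channel resource read
+	// removes the resource from state, while import returns an error).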
+ return nil, nil
+}
+
// GetIterationFromID queries the HCP Packer registry for an existing bucket iteration using its ULID.
func GetIterationFromID(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation,
bucketSlug string, iterationID string) (*packermodels.HashicorpCloudPackerIteration, error) {
@@ -75,12 +93,13 @@ func getIteration(client *Client, params *packer_service.PackerServiceGetIterati
// CreateBucketChannel creates a channel on the named bucket.
func CreateBucketChannel(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation, bucketSlug string, channelSlug string,
- iteration *packermodels.HashicorpCloudPackerIteration) (*packermodels.HashicorpCloudPackerChannel, error) {
+ iteration *packermodels.HashicorpCloudPackerIteration, restriction *packermodels.HashicorpCloudPackerCreateChannelRequestRestriction) (*packermodels.HashicorpCloudPackerChannel, error) {
params := packer_service.NewPackerServiceCreateChannelParamsWithContext(ctx)
params.LocationOrganizationID = loc.OrganizationID
params.LocationProjectID = loc.ProjectID
params.BucketSlug = bucketSlug
params.Body.Slug = channelSlug
+ params.Body.Restriction = restriction
if iteration != nil {
switch {
@@ -104,12 +123,13 @@ func CreateBucketChannel(ctx context.Context, client *Client, loc *sharedmodels.
// UpdateBucketChannel updates the named channel.
func UpdateBucketChannel(ctx context.Context, client *Client, loc *sharedmodels.HashicorpCloudLocationLocation, bucketSlug string, channelSlug string,
- iteration *packermodels.HashicorpCloudPackerIteration) (*packermodels.HashicorpCloudPackerChannel, error) {
+ iteration *packermodels.HashicorpCloudPackerIteration, restriction *packermodels.HashicorpCloudPackerUpdateChannelRequestRestriction) (*packermodels.HashicorpCloudPackerChannel, error) {
params := packer_service.NewPackerServiceUpdateChannelParamsWithContext(ctx)
params.LocationOrganizationID = loc.OrganizationID
params.LocationProjectID = loc.ProjectID
params.BucketSlug = bucketSlug
params.Slug = channelSlug
+ params.Body.Restriction = restriction
if iteration != nil {
switch {
diff --git a/internal/provider/data_source_packer_image.go b/internal/provider/data_source_packer_image.go
index dc3ba2c2a..22fe3a319 100644
--- a/internal/provider/data_source_packer_image.go
+++ b/internal/provider/data_source_packer_image.go
@@ -28,7 +28,7 @@ func dataSourcePackerImage() *schema.Resource {
Schema: map[string]*schema.Schema{
// Required inputs
"bucket_name": {
- Description: "The slug of the HCP Packer Registry image bucket to pull from.",
+ Description: "The slug of the HCP Packer Registry bucket to pull from.",
Type: schema.TypeString,
Required: true,
ValidateDiagFunc: validateSlugID,
diff --git a/internal/provider/data_source_packer_image_iteration.go b/internal/provider/data_source_packer_image_iteration.go
index 5c82014c8..b5b63e7d3 100644
--- a/internal/provider/data_source_packer_image_iteration.go
+++ b/internal/provider/data_source_packer_image_iteration.go
@@ -26,7 +26,7 @@ func dataSourcePackerImageIteration() *schema.Resource {
Schema: map[string]*schema.Schema{
// Required inputs
"bucket_name": {
- Description: "The slug of the HCP Packer Registry image bucket to pull from.",
+ Description: "The slug of the HCP Packer Registry bucket to pull from.",
Type: schema.TypeString,
Required: true,
ValidateDiagFunc: validateSlugID,
diff --git a/internal/provider/data_source_packer_image_iteration_test.go b/internal/provider/data_source_packer_image_iteration_test.go
index dee8458bc..af8fcb498 100644
--- a/internal/provider/data_source_packer_image_iteration_test.go
+++ b/internal/provider/data_source_packer_image_iteration_test.go
@@ -5,21 +5,12 @@ package provider
import (
"fmt"
- "net/http"
"testing"
"time"
- "github.com/cenkalti/backoff"
"github.com/go-openapi/strfmt"
- "github.com/google/uuid"
- "github.com/hashicorp/hcp-sdk-go/clients/cloud-operation/stable/2020-05-05/client/operation_service"
- "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/client/packer_service"
- "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/models"
- sharedmodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-shared/v1/models"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
- "github.com/hashicorp/terraform-provider-hcp/internal/clients"
- "google.golang.org/grpc/codes"
)
var (
@@ -41,422 +32,6 @@ var (
}`, acctestUbuntuBucket, acctestProductionChannel)
)
-func upsertRegistry(t *testing.T) {
- t.Helper()
-
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- params := packer_service.NewPackerServiceCreateRegistryParams()
- params.LocationOrganizationID = loc.OrganizationID
- params.LocationProjectID = loc.ProjectID
- featureTier := models.HashicorpCloudPackerRegistryConfigTierPLUS
- params.Body = packer_service.PackerServiceCreateRegistryBody{
- FeatureTier: &featureTier,
- }
-
- resp, err := client.Packer.PackerServiceCreateRegistry(params, nil)
- if err, ok := err.(*packer_service.PackerServiceCreateRegistryDefault); ok {
- switch err.Code() {
- case int(codes.AlreadyExists), http.StatusConflict:
- getParams := packer_service.NewPackerServiceGetRegistryParams()
- getParams.LocationOrganizationID = loc.OrganizationID
- getParams.LocationProjectID = loc.ProjectID
- getResp, err := client.Packer.PackerServiceGetRegistry(getParams, nil)
- if err != nil {
- t.Errorf("unexpected GetRegistry error: %v", err)
- return
- }
- if *getResp.Payload.Registry.Config.FeatureTier != models.HashicorpCloudPackerRegistryConfigTierPLUS {
- // Make sure is a plus registry
- params := packer_service.NewPackerServiceUpdateRegistryParams()
- params.LocationOrganizationID = loc.OrganizationID
- params.LocationProjectID = loc.ProjectID
- featureTier := models.HashicorpCloudPackerRegistryConfigTierPLUS
- params.Body = packer_service.PackerServiceUpdateRegistryBody{
- FeatureTier: &featureTier,
- }
- resp, err := client.Packer.PackerServiceUpdateRegistry(params, nil)
- if err != nil {
- t.Errorf("unexpected UpdateRegistry error: %v", err)
- return
- }
- waitForOperation(t, loc, "Reactivate Registry", resp.Payload.Operation.ID, client)
- }
- return
- default:
- t.Errorf("unexpected CreateRegistry error, expected nil or 409. Got code: %d err: %v", err.Code(), err)
- return
- }
- }
-
- waitForOperation(t, loc, "Create Registry", resp.Payload.Operation.ID, client)
-}
-
-func waitForOperation(
- t *testing.T,
- loc *sharedmodels.HashicorpCloudLocationLocation,
- operationName string,
- operationID string,
- client *clients.Client,
-) {
- timeout := "5s"
- params := operation_service.NewWaitParams()
- params.ID = operationID
- params.Timeout = &timeout
- params.LocationOrganizationID = loc.OrganizationID
- params.LocationProjectID = loc.ProjectID
-
- operation := func() error {
- resp, err := client.Operation.Wait(params, nil)
- if err != nil {
- t.Errorf("unexpected error %#v", err)
- }
-
- if resp.Payload.Operation.Error != nil {
- t.Errorf("Operation failed: %s", resp.Payload.Operation.Error.Message)
- }
-
- switch *resp.Payload.Operation.State {
- case sharedmodels.HashicorpCloudOperationOperationStatePENDING:
- msg := fmt.Sprintf("==> Operation \"%s\" pending...", operationName)
- return fmt.Errorf(msg)
- case sharedmodels.HashicorpCloudOperationOperationStateRUNNING:
- msg := fmt.Sprintf("==> Operation \"%s\" running...", operationName)
- return fmt.Errorf(msg)
- case sharedmodels.HashicorpCloudOperationOperationStateDONE:
- default:
- t.Errorf("Operation returned unknown state: %s", *resp.Payload.Operation.State)
- }
- return nil
- }
-
- bo := backoff.NewExponentialBackOff()
- bo.InitialInterval = 10 * time.Second
- bo.RandomizationFactor = 0.5
- bo.Multiplier = 1.5
- bo.MaxInterval = 30 * time.Second
- bo.MaxElapsedTime = 40 * time.Minute
- err := backoff.Retry(operation, bo)
- if err != nil {
- t.Errorf("unexpected error: %#v", err)
- }
-}
-
-func upsertBucket(t *testing.T, bucketSlug string) {
- t.Helper()
-
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- createBktParams := packer_service.NewPackerServiceCreateBucketParams()
- createBktParams.LocationOrganizationID = loc.OrganizationID
- createBktParams.LocationProjectID = loc.ProjectID
- createBktParams.Body = packer_service.PackerServiceCreateBucketBody{
- BucketSlug: bucketSlug,
- }
- _, err := client.Packer.PackerServiceCreateBucket(createBktParams, nil)
- if err == nil {
- return
- }
- if err, ok := err.(*packer_service.PackerServiceCreateBucketDefault); ok {
- switch err.Code() {
- case int(codes.AlreadyExists), http.StatusConflict:
- // all good here !
- return
- }
- }
-
- t.Errorf("unexpected CreateBucket error, expected nil or 409. Got %v", err)
-}
-
-func upsertIteration(t *testing.T, bucketSlug, fingerprint string) {
- t.Helper()
-
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- createItParams := packer_service.NewPackerServiceCreateIterationParams()
- createItParams.LocationOrganizationID = loc.OrganizationID
- createItParams.LocationProjectID = loc.ProjectID
- createItParams.BucketSlug = bucketSlug
-
- createItParams.Body = packer_service.PackerServiceCreateIterationBody{
- Fingerprint: fingerprint,
- }
- _, err := client.Packer.PackerServiceCreateIteration(createItParams, nil)
- if err == nil {
- return
- }
- if err, ok := err.(*packer_service.PackerServiceCreateIterationDefault); ok {
- switch err.Code() {
- case int(codes.AlreadyExists), http.StatusConflict:
- // all good here !
- return
- }
- }
-
- t.Errorf("unexpected CreateIteration error, expected nil or 409. Got %v", err)
-}
-
-func revokeIteration(t *testing.T, iterationID, bucketSlug string, revokeAt strfmt.DateTime) {
- t.Helper()
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- params := packer_service.NewPackerServiceUpdateIterationParams()
- params.LocationOrganizationID = loc.OrganizationID
- params.LocationProjectID = loc.ProjectID
- params.IterationID = iterationID
- params.Body = packer_service.PackerServiceUpdateIterationBody{
- BucketSlug: bucketSlug,
- RevokeAt: revokeAt,
- }
-
- _, err := client.Packer.PackerServiceUpdateIteration(params, nil)
- if err != nil {
- t.Fatal(err)
- }
-}
-
-func getIterationIDFromFingerPrint(t *testing.T, bucketSlug string, fingerprint string) (string, error) {
- t.Helper()
-
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- getItParams := packer_service.NewPackerServiceGetIterationParams()
- getItParams.LocationOrganizationID = loc.OrganizationID
- getItParams.LocationProjectID = loc.ProjectID
- getItParams.BucketSlug = bucketSlug
- getItParams.Fingerprint = &fingerprint
-
- ok, err := client.Packer.PackerServiceGetIteration(getItParams, nil)
- if err != nil {
- return "", err
- }
- return ok.Payload.Iteration.ID, nil
-}
-
-func upsertBuild(t *testing.T, bucketSlug, fingerprint, iterationID string) {
- client := testAccProvider.Meta().(*clients.Client)
-
- createBuildParams := packer_service.NewPackerServiceCreateBuildParams()
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
- createBuildParams.LocationOrganizationID = loc.OrganizationID
- createBuildParams.LocationProjectID = loc.ProjectID
- createBuildParams.BucketSlug = bucketSlug
- createBuildParams.IterationID = iterationID
-
- status := models.HashicorpCloudPackerBuildStatusRUNNING
- createBuildParams.Body = packer_service.PackerServiceCreateBuildBody{
- Build: &models.HashicorpCloudPackerBuildCreateBody{
- CloudProvider: "aws",
- ComponentType: "amazon-ebs.example",
- PackerRunUUID: uuid.New().String(),
- Status: &status,
- },
- Fingerprint: fingerprint,
- }
-
- build, err := client.Packer.PackerServiceCreateBuild(createBuildParams, nil)
- if err, ok := err.(*packer_service.PackerServiceCreateBuildDefault); ok {
- switch err.Code() {
- case int(codes.Aborted), http.StatusConflict:
- // all good here !
- return
- }
- }
-
- if build == nil {
- t.Errorf("unexpected CreateBuild error, expected non nil build response. Got %v", err)
- return
- }
-
- // Iterations are currently only assigned an incremental version when publishing image metadata on update.
- // Incremental versions are a requirement for assigning the channel.
- updateBuildParams := packer_service.NewPackerServiceUpdateBuildParams()
- updateBuildParams.LocationOrganizationID = loc.OrganizationID
- updateBuildParams.LocationProjectID = loc.ProjectID
- updateBuildParams.BuildID = build.Payload.Build.ID
- updatesStatus := models.HashicorpCloudPackerBuildStatusDONE
- updateBuildParams.Body = packer_service.PackerServiceUpdateBuildBody{
- Updates: &models.HashicorpCloudPackerBuildUpdates{
- Status: &updatesStatus,
- Images: []*models.HashicorpCloudPackerImageCreateBody{
- {
- ImageID: "ami-42",
- Region: "us-east-1",
- },
- {
- ImageID: "ami-43",
- Region: "us-east-2",
- },
- },
- Labels: map[string]string{"test-key": "test-value"},
- },
- }
- _, err = client.Packer.PackerServiceUpdateBuild(updateBuildParams, nil)
- if err, ok := err.(*packer_service.PackerServiceUpdateBuildDefault); ok {
- t.Errorf("unexpected UpdateBuild error, expected nil. Got %v", err)
- }
-}
-
-func upsertChannel(t *testing.T, bucketSlug, channelSlug, iterationID string) {
- t.Helper()
-
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- createChParams := packer_service.NewPackerServiceCreateChannelParams()
- createChParams.LocationOrganizationID = loc.OrganizationID
- createChParams.LocationProjectID = loc.ProjectID
- createChParams.BucketSlug = bucketSlug
- createChParams.Body = packer_service.PackerServiceCreateChannelBody{
- Slug: channelSlug,
- IterationID: iterationID,
- }
-
- _, err := client.Packer.PackerServiceCreateChannel(createChParams, nil)
- if err == nil {
- return
- }
- if err, ok := err.(*packer_service.PackerServiceCreateChannelDefault); ok {
- switch err.Code() {
- case int(codes.AlreadyExists), http.StatusConflict:
- // all good here !
- updateChannel(t, bucketSlug, channelSlug, iterationID)
- return
- }
- }
- t.Errorf("unexpected CreateChannel error, expected nil. Got %v", err)
-}
-
-func updateChannel(t *testing.T, bucketSlug, channelSlug, iterationID string) {
- t.Helper()
-
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- updateChParams := packer_service.NewPackerServiceUpdateChannelParams()
- updateChParams.LocationOrganizationID = loc.OrganizationID
- updateChParams.LocationProjectID = loc.ProjectID
- updateChParams.BucketSlug = bucketSlug
- updateChParams.Slug = channelSlug
- updateChParams.Body = packer_service.PackerServiceUpdateChannelBody{
- IterationID: iterationID,
- }
-
- _, err := client.Packer.PackerServiceUpdateChannel(updateChParams, nil)
- if err == nil {
- return
- }
- t.Errorf("unexpected UpdateChannel error, expected nil. Got %v", err)
-}
-
-func deleteBucket(t *testing.T, bucketSlug string, logOnError bool) {
- t.Helper()
-
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- deleteBktParams := packer_service.NewPackerServiceDeleteBucketParams()
- deleteBktParams.LocationOrganizationID = loc.OrganizationID
- deleteBktParams.LocationProjectID = loc.ProjectID
- deleteBktParams.BucketSlug = bucketSlug
-
- _, err := client.Packer.PackerServiceDeleteBucket(deleteBktParams, nil)
- if err == nil {
- return
- }
- if logOnError {
- t.Logf("unexpected DeleteBucket error, expected nil. Got %v", err)
- }
-}
-
-func deleteIteration(t *testing.T, bucketSlug string, iterationFingerprint string, logOnError bool) {
- t.Helper()
-
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- iterationID, err := getIterationIDFromFingerPrint(t, bucketSlug, iterationFingerprint)
- if err != nil {
- if logOnError {
- t.Logf(err.Error())
- }
- return
- }
-
- deleteItParams := packer_service.NewPackerServiceDeleteIterationParams()
- deleteItParams.LocationOrganizationID = loc.OrganizationID
- deleteItParams.LocationProjectID = loc.ProjectID
- deleteItParams.BucketSlug = &bucketSlug
- deleteItParams.IterationID = iterationID
-
- _, err = client.Packer.PackerServiceDeleteIteration(deleteItParams, nil)
- if err == nil {
- return
- }
- if logOnError {
- t.Logf("unexpected DeleteIteration error, expected nil. Got %v", err)
- }
-}
-
-func deleteChannel(t *testing.T, bucketSlug string, channelSlug string, logOnError bool) {
- t.Helper()
-
- client := testAccProvider.Meta().(*clients.Client)
- loc := &sharedmodels.HashicorpCloudLocationLocation{
- OrganizationID: client.Config.OrganizationID,
- ProjectID: client.Config.ProjectID,
- }
-
- deleteChParams := packer_service.NewPackerServiceDeleteChannelParams()
- deleteChParams.LocationOrganizationID = loc.OrganizationID
- deleteChParams.LocationProjectID = loc.ProjectID
- deleteChParams.BucketSlug = bucketSlug
- deleteChParams.Slug = channelSlug
-
- _, err := client.Packer.PackerServiceDeleteChannel(deleteChParams, nil)
- if err == nil {
- return
- }
- if logOnError {
- t.Logf("unexpected DeleteChannel error, expected nil. Got %v", err)
- }
-}
-
func TestAcc_dataSourcePacker(t *testing.T) {
resourceName := "data.hcp_packer_image_iteration.alpine"
fingerprint := "42"
diff --git a/internal/provider/data_source_packer_iteration.go b/internal/provider/data_source_packer_iteration.go
index fd9b7046c..e9817ce35 100644
--- a/internal/provider/data_source_packer_iteration.go
+++ b/internal/provider/data_source_packer_iteration.go
@@ -25,7 +25,7 @@ func dataSourcePackerIteration() *schema.Resource {
Schema: map[string]*schema.Schema{
// Required inputs
"bucket_name": {
- Description: "The slug of the HCP Packer Registry image bucket to pull from.",
+ Description: "The slug of the HCP Packer Registry bucket to pull from.",
Type: schema.TypeString,
Required: true,
ValidateDiagFunc: validateSlugID,
diff --git a/internal/provider/provider.go b/internal/provider/provider.go
index d185f25d9..cc0fc827d 100644
--- a/internal/provider/provider.go
+++ b/internal/provider/provider.go
@@ -5,10 +5,6 @@ package provider
import (
"context"
- "encoding/json"
- "fmt"
- "io"
- "net/http"
"os"
"time"
@@ -56,6 +52,7 @@ func New() func() *schema.Provider {
"hcp_hvn_peering_connection": resourceHvnPeeringConnection(),
"hcp_hvn_route": resourceHvnRoute(),
"hcp_packer_channel": resourcePackerChannel(),
+ "hcp_packer_channel_assignment": resourcePackerChannelAssignment(),
"hcp_vault_cluster": resourceVaultCluster(),
"hcp_vault_cluster_admin_token": resourceVaultClusterAdminToken(),
},
@@ -201,28 +198,6 @@ func getProjectFromCredentials(ctx context.Context, client *clients.Client) (pro
return project, diags
}
-// Status endpoint for prod.
-const statuspageURL = "https://status.hashicorp.com/api/v2/components.json"
-
-var hcpComponentIds = map[string]string{
- "0q55nwmxngkc": "HCP API",
- "sxffkgfb4fhb": "HCP Consul",
- "0mbkqnrzg33w": "HCP Packer",
- "mgv1p2j9x444": "HCP Portal",
- "mb7xrbx9gjnq": "HCP Vault",
-}
-
-type statuspage struct {
- Components []component `json:"components"`
-}
-
-type component struct {
- ID string `json:"id"`
- Status status `json:"status"`
-}
-
-type status string
-
// getOldestProject retrieves the oldest project from a list based on its created_at time.
func getOldestProject(projects []*models.ResourcemanagerProject) (oldestProj *models.ResourcemanagerProject) {
oldestTime := time.Now()
@@ -236,95 +211,3 @@ func getOldestProject(projects []*models.ResourcemanagerProject) (oldestProj *mo
}
return oldestProj
}
-
-func isHCPOperational() (diags diag.Diagnostics) {
- req, err := http.NewRequest("GET", statuspageURL, nil)
- if err != nil {
- diags = append(diags, diag.Diagnostic{
- Severity: diag.Warning,
- Summary: "You may experience issues using HCP.",
- Detail: fmt.Sprintf("Unable to create request to verify HCP status: %s", err),
- })
-
- return diags
- }
-
- var cl = http.Client{}
- resp, err := cl.Do(req)
- if err != nil {
- diags = append(diags, diag.Diagnostic{
- Severity: diag.Warning,
- Summary: "You may experience issues using HCP.",
- Detail: fmt.Sprintf("Unable to complete request to verify HCP status: %s", err),
- })
-
- return diags
- }
- defer resp.Body.Close()
-
- jsBytes, err := io.ReadAll(resp.Body)
- if err != nil {
- diags = append(diags, diag.Diagnostic{
- Severity: diag.Warning,
- Summary: "You may experience issues using HCP.",
- Detail: fmt.Sprintf("Unable read response to verify HCP status: %s", err),
- })
-
- return diags
- }
-
- sp := statuspage{}
- err = json.Unmarshal(jsBytes, &sp)
- if err != nil {
- diags = append(diags, diag.Diagnostic{
- Severity: diag.Warning,
- Summary: "You may experience issues using HCP.",
- Detail: fmt.Sprintf("Unable unmarshal response to verify HCP status: %s", err),
- })
-
- return diags
- }
-
- // Translate the status page component IDs into a map of component name and operation status.
- var systemStatus = map[string]status{}
-
- for _, c := range sp.Components {
- name, ok := hcpComponentIds[c.ID]
- if ok {
- systemStatus[name] = c.Status
- }
- }
-
- operational := true
- for _, st := range systemStatus {
- if st != "operational" {
- operational = false
- }
- }
-
- if !operational {
- diags = append(diags, diag.Diagnostic{
- Severity: diag.Warning,
- Summary: "You may experience issues using HCP.",
- Detail: fmt.Sprintf("HCP is reporting the following:\n\n%v\nPlease check https://status.hashicorp.com for more details.", printStatus(systemStatus)),
- })
- }
-
- return diags
-}
-
-func printStatus(m map[string]status) string {
- var maxLenKey int
- for k := range m {
- if len(k) > maxLenKey {
- maxLenKey = len(k)
- }
- }
-
- pr := ""
- for k, v := range m {
- pr += fmt.Sprintf("%s:%*s %s\n", k, 5+(maxLenKey-len(k)), " ", v)
- }
-
- return pr
-}
diff --git a/internal/provider/resource_packer_channel.go b/internal/provider/resource_packer_channel.go
index f59ff51db..f455f87dc 100644
--- a/internal/provider/resource_packer_channel.go
+++ b/internal/provider/resource_packer_channel.go
@@ -20,7 +20,7 @@ import (
func resourcePackerChannel() *schema.Resource {
return &schema.Resource{
- Description: "The Packer Channel resource allows you to manage image bucket channels within an active HCP Packer Registry.",
+ Description: "The Packer Channel resource allows you to manage a bucket channel within an active HCP Packer Registry.",
CreateContext: resourcePackerChannelCreate,
DeleteContext: resourcePackerChannelDelete,
ReadContext: resourcePackerChannelRead,
@@ -35,7 +35,6 @@ func resourcePackerChannel() *schema.Resource {
StateContext: resourcePackerChannelImport,
},
CustomizeDiff: resourcePackerChannelCustomizeDiff,
-
Schema: map[string]*schema.Schema{
// Required inputs
"name": {
@@ -46,7 +45,7 @@ func resourcePackerChannel() *schema.Resource {
ValidateDiagFunc: validateSlugID,
},
"bucket_name": {
- Description: "The slug of the HCP Packer Registry image bucket where the channel should be created in.",
+ Description: "The slug of the HCP Packer Registry bucket where the channel should be created.",
Type: schema.TypeString,
ForceNew: true,
Required: true,
@@ -55,7 +54,7 @@ func resourcePackerChannel() *schema.Resource {
// Optional inputs
"project_id": {
Description: `
-The ID of the HCP project where this channel is located in.
+The ID of the HCP project where this channel is located.
If not specified, the project specified in the HCP Provider config block will be used, if configured.
If a project is not configured in the HCP Provider config block, the oldest project in the organization will be used.`,
Type: schema.TypeString,
@@ -65,11 +64,12 @@ If a project is not configured in the HCP Provider config block, the oldest proj
Computed: true,
},
"iteration": {
- Description: "The iteration assigned to the channel.",
+ Description: "The iteration assigned to the channel. This block is deprecated. Please use `hcp_packer_channel_assignment` instead.",
Type: schema.TypeList,
MaxItems: 1,
Optional: true,
Computed: true,
+ Deprecated: "The `iteration` block is deprecated. Please remove the `iteration` block and create a new `hcp_packer_channel_assignment` resource to manage this channel's assigned iteration with Terraform.",
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"fingerprint": {
@@ -97,6 +97,11 @@ If a project is not configured in the HCP Provider config block, the oldest proj
},
},
// Computed Values
+ "restricted": {
+ Description: "If true, the channel is only visible to users with permission to create and manage it. Otherwise the channel is visible to every member of the organization.",
+ Type: schema.TypeBool,
+ Computed: true,
+ },
"author_id": {
Description: "The author of this channel.",
Type: schema.TypeString,
@@ -108,7 +113,7 @@ If a project is not configured in the HCP Provider config block, the oldest proj
Computed: true,
},
"organization_id": {
- Description: "The ID of the HCP organization where this channel is located in.",
+ Description: "The ID of the HCP organization where this channel is located.",
Type: schema.TypeString,
Computed: true,
},
@@ -142,27 +147,20 @@ func resourcePackerChannelRead(ctx context.Context, d *schema.ResourceData, meta
log.Printf("[INFO] Reading HCP Packer channel (%s) [bucket_name=%s, project_id=%s, organization_id=%s]", channelName, bucketName, loc.ProjectID, loc.OrganizationID)
- resp, err := clients.ListBucketChannels(ctx, client, loc, bucketName)
+ channel, err := clients.GetPackerChannelBySlugFromList(ctx, client, loc, bucketName, channelName)
if err != nil {
return diag.FromErr(err)
}
- var channel packermodels.HashicorpCloudPackerChannel
- for _, c := range resp.Channels {
- if c.Slug == channelName {
- channel = *c
- break
- }
- }
- if channel.ID == "" {
+ if channel == nil {
log.Printf(
- "[WARN] HCP Packer chanel with (name %q) (bucket_name %q) (project_id %q) not found, removing from state.",
+ "[WARN] HCP Packer channel with (name %q) (bucket_name %q) (project_id %q) not found, removing from state.",
channelName, bucketName, loc.ProjectID,
)
d.SetId("")
return nil
}
- return setPackerChannelResourceData(d, &channel)
+ return setPackerChannelResourceData(d, channel)
}
func resourcePackerChannelCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
@@ -183,26 +181,17 @@ func resourcePackerChannelCreate(ctx context.Context, d *schema.ResourceData, me
return diag.FromErr(err)
}
- iterationConfig, ok := d.GetOk("iteration")
- if !ok {
- channel, err := clients.CreateBucketChannel(ctx, client, loc, bucketName, channelName, nil)
- if err != nil {
- return diag.FromErr(err)
- }
+ var iteration *packermodels.HashicorpCloudPackerIteration
- if channel == nil {
- return diag.Errorf("Unable to create channel in bucket %s named %s.", bucketName, channelName)
+ if _, ok := d.GetOk("iteration.0"); ok {
+ iteration = &packermodels.HashicorpCloudPackerIteration{
+ IncrementalVersion: int32(d.Get("iteration.0.incremental_version").(int)),
+ ID: d.Get("iteration.0.id").(string),
+ Fingerprint: d.Get("iteration.0.fingerprint").(string),
}
-
- return setPackerChannelResourceData(d, channel)
- }
-
- var iteration *packermodels.HashicorpCloudPackerIteration
- if config, ok := iterationConfig.([]interface{})[0].(map[string]interface{}); ok {
- iteration = expandIterationConfig(config)
}
- channel, err := clients.CreateBucketChannel(ctx, client, loc, bucketName, channelName, iteration)
+ channel, err := clients.CreateBucketChannel(ctx, client, loc, bucketName, channelName, iteration, nil)
if err != nil {
return diag.FromErr(err)
}
@@ -233,35 +222,21 @@ func resourcePackerChannelUpdate(ctx context.Context, d *schema.ResourceData, me
}
var iteration *packermodels.HashicorpCloudPackerIteration
- iterationConfig, ok := d.GetOk("iteration")
- if !ok || iterationConfig.([]interface{})[0] == nil {
- channel, err := clients.UpdateBucketChannel(ctx, client, loc, bucketName, channelName, iteration)
- if err != nil {
- return diag.FromErr(err)
- }
- return setPackerChannelResourceData(d, channel)
- }
- config, ok := iterationConfig.([]interface{})[0].(map[string]interface{})
- if !ok {
- return diag.Errorf("Failed to read iteration configuration during update.")
- }
-
- updatedIterationConfig := make(map[string]interface{})
- for key, value := range config {
- fullKey := fmt.Sprintf("iteration.0.%s", key)
- // Upstream API doesn't know how to handle the case when all params are set;
- // So we keep the inputs that are not coming from state.
- if d.HasChange(fullKey) {
- updatedIterationConfig[key] = value
+ if _, ok := d.GetOk("iteration.0"); ok {
+ iteration = &packermodels.HashicorpCloudPackerIteration{}
+ if !d.HasChange("iteration.0") || d.HasChange("iteration.0.incremental_version") {
+ iteration.IncrementalVersion = int32(d.Get("iteration.0.incremental_version").(int))
+ }
+ if !d.HasChange("iteration.0") || d.HasChange("iteration.0.id") {
+ iteration.ID = d.Get("iteration.0.id").(string)
+ }
+ if !d.HasChange("iteration.0") || d.HasChange("iteration.0.fingerprint") {
+ iteration.Fingerprint = d.Get("iteration.0.fingerprint").(string)
}
}
- if len(updatedIterationConfig) != 0 {
- iteration = expandIterationConfig(updatedIterationConfig)
- }
-
- channel, err := clients.UpdateBucketChannel(ctx, client, loc, bucketName, channelName, iteration)
+ channel, err := clients.UpdateBucketChannel(ctx, client, loc, bucketName, channelName, iteration, nil)
if err != nil {
return diag.FromErr(err)
}
@@ -343,20 +318,13 @@ func resourcePackerChannelImport(ctx context.Context, d *schema.ResourceData, me
return nil, err
}
- resp, err := clients.ListBucketChannels(ctx, client, loc, bucketName)
+
+ channel, err := clients.GetPackerChannelBySlugFromList(ctx, client, loc, bucketName, channelName)
if err != nil {
return nil, err
}
- var channel packermodels.HashicorpCloudPackerChannel
- for _, c := range resp.Channels {
- if c.Slug == channelName {
- channel = *c
- break
- }
- }
-
- if channel.ID == "" {
+ if channel == nil {
return nil, fmt.Errorf("unable to find channel in bucket %s named %s", bucketName, channelName)
}
@@ -372,10 +340,6 @@ func resourcePackerChannelImport(ctx context.Context, d *schema.ResourceData, me
return nil, err
}
- if channel.Iteration == nil {
- return []*schema.ResourceData{d}, nil
- }
-
return []*schema.ResourceData{d}, nil
}
@@ -401,31 +365,47 @@ func resourcePackerChannelCustomizeDiff(ctx context.Context, d *schema.ResourceD
if d.HasChange("iteration.0") {
var iterationResponse *packermodels.HashicorpCloudPackerIteration
var err error
- if id, ok := d.GetOk("iteration.0.id"); d.HasChange("iteration.0.id") && ok && id.(string) != "" {
+ if id, ok := d.GetOk("iteration.0.id"); ok && d.HasChange("iteration.0.id") && id.(string) != "" {
iterationResponse, err = clients.GetIterationFromID(ctx, client, loc, bucketName, id.(string))
- } else if fingerprint, ok := d.GetOk("iteration.0.fingerprint"); d.HasChange("iteration.0.fingerprint") && ok && fingerprint.(string) != "" {
+ } else if fingerprint, ok := d.GetOk("iteration.0.fingerprint"); ok && d.HasChange("iteration.0.fingerprint") && fingerprint.(string) != "" {
iterationResponse, err = clients.GetIterationFromFingerprint(ctx, client, loc, bucketName, fingerprint.(string))
- } else if version, ok := d.GetOk("iteration.0.incremental_version"); d.HasChange("iteration.0.incremental_version") && ok && version.(int) > 0 {
+ } else if version, ok := d.GetOk("iteration.0.incremental_version"); ok && d.HasChange("iteration.0.incremental_version") && version.(int) > 0 {
iterationResponse, err = clients.GetIterationFromVersion(ctx, client, loc, bucketName, int32(version.(int)))
}
if err != nil {
return err
}
- iteration := []map[string]interface{}{}
+ iterations := []map[string]interface{}{}
if iterationResponse != nil {
- iteration = append(iteration, map[string]interface{}{
+ iterations = append(iterations, map[string]interface{}{
"id": iterationResponse.ID,
"fingerprint": iterationResponse.Fingerprint,
"incremental_version": iterationResponse.IncrementalVersion,
})
+ } else {
+ iterations = append(iterations, map[string]interface{}{
+ "id": "",
+ "fingerprint": "",
+ "incremental_version": 0,
+ })
}
- err = d.SetNew("iteration", iteration)
+
+ err = d.SetNew("iteration", iterations)
if err != nil {
return err
}
}
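+	// Changing the assigned iteration also updates the channel server-side, so
+	// mark the server-set fields below as unknown ("known after apply") in the plan.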
+ if d.HasChanges("iteration") {
+ if err := d.SetNewComputed("updated_at"); err != nil {
+ return err
+ }
+ if err := d.SetNewComputed("author_id"); err != nil {
+ return err
+ }
+ }
+
return nil
}
@@ -436,6 +416,7 @@ func setPackerChannelResourceData(d *schema.ResourceData, channel *packermodels.
}
d.SetId(channel.ID)
+
if err := d.Set("author_id", channel.AuthorID); err != nil {
return diag.FromErr(err)
}
@@ -452,37 +433,29 @@ func setPackerChannelResourceData(d *schema.ResourceData, channel *packermodels.
return diag.FromErr(err)
}
- return nil
-}
-
-func expandIterationConfig(config map[string]interface{}) *packermodels.HashicorpCloudPackerIteration {
- if config == nil {
- return nil
- }
-
- var iteration packermodels.HashicorpCloudPackerIteration
- if v, ok := config["id"]; ok && v.(string) != "" {
- iteration.ID = v.(string)
- }
- if v, ok := config["fingerprint"]; ok && v.(string) != "" {
- iteration.Fingerprint = v.(string)
- }
- if v, ok := config["incremental_version"]; ok && v.(int) != 0 {
- iteration.IncrementalVersion = int32(v.(int))
+ if err := d.Set("restricted", channel.Restricted); err != nil {
+ return diag.FromErr(err)
}
- return &iteration
+ return nil
}
func flattenIterationConfig(iteration *packermodels.HashicorpCloudPackerIteration) []map[string]interface{} {
result := make([]map[string]interface{}, 0)
if iteration == nil {
+ result = append(result, map[string]interface{}{
+ "id": "",
+ "fingerprint": "",
+ "incremental_version": 0,
+ })
return result
+ } else {
+ result = append(result, map[string]interface{}{
+ "id": iteration.ID,
+ "fingerprint": iteration.Fingerprint,
+ "incremental_version": iteration.IncrementalVersion,
+ })
}
- item := make(map[string]interface{})
- item["id"] = iteration.ID
- item["fingerprint"] = iteration.Fingerprint
- item["incremental_version"] = iteration.IncrementalVersion
- return append(result, item)
+ return result
}
diff --git a/internal/provider/resource_packer_channel_assignment.go b/internal/provider/resource_packer_channel_assignment.go
new file mode 100644
index 000000000..1819e84f2
--- /dev/null
+++ b/internal/provider/resource_packer_channel_assignment.go
@@ -0,0 +1,462 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "log"
+ "math"
+ "strings"
+
+ packermodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/models"
+ sharedmodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-shared/v1/models"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
+ "github.com/hashicorp/terraform-provider-hcp/internal/clients"
+)
+
+// unassignString represents an unassigned (or "null") channel assignment for
+// the string-typed iteration identifiers (iteration_id and iteration_fingerprint).
+const unassignString string = "none"
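+
+// For illustration only: a configuration may set iteration_id = "none" (or
+// iteration_fingerprint = "none", or iteration_version = 0) to explicitly keep
+// the channel unassigned.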
+
+func resourcePackerChannelAssignment() *schema.Resource {
+ return &schema.Resource{
+ Description: "The Packer Channel Assignment resource allows you to manage the iteration assigned to a bucket channel in an active HCP Packer Registry.",
+ CreateContext: resourcePackerChannelAssignmentCreate,
+ DeleteContext: resourcePackerChannelAssignmentDelete,
+ ReadContext: resourcePackerChannelAssignmentRead,
+ UpdateContext: resourcePackerChannelAssignmentUpdate,
+ Timeouts: &schema.ResourceTimeout{
+ Create: &defaultPackerTimeout,
+ Default: &defaultPackerTimeout,
+ Update: &defaultPackerTimeout,
+ Delete: &defaultPackerTimeout,
+ },
+ Importer: &schema.ResourceImporter{
+ StateContext: resourcePackerChannelAssignmentImport,
+ },
+ CustomizeDiff: resourcePackerChannelAssignmentCustomizeDiff,
+ Schema: map[string]*schema.Schema{
+ // Required inputs
+ "channel_name": {
+ Description: "The name of the HCP Packer channel being managed.",
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ ValidateDiagFunc: validateSlugID,
+ },
+ "bucket_name": {
+ Description: "The slug of the HCP Packer Registry bucket where the channel is located.",
+ Type: schema.TypeString,
+ Required: true,
+ ForceNew: true,
+ ValidateDiagFunc: validateSlugID,
+ },
+ // Optional inputs
+ "project_id": {
+ Description: `
+The ID of the HCP project where the channel is located.
+If not specified, the project specified in the HCP Provider config block will be used, if configured.
+If a project is not configured in the HCP Provider config block, the oldest project in the organization will be used.`,
+ Type: schema.TypeString,
+ Computed: true,
+ Optional: true,
+ ForceNew: true,
+ ValidateFunc: validation.IsUUID,
+ },
+ "iteration_fingerprint": {
+ Description: "The fingerprint of the iteration assigned to the channel.",
+ Type: schema.TypeString,
+ Optional: true,
+ Computed: true,
+ ExactlyOneOf: []string{"iteration_id", "iteration_fingerprint", "iteration_version"},
+ ValidateFunc: validation.StringIsNotEmpty,
+ },
+ "iteration_id": {
+ Description: "The ID of the iteration assigned to the channel.",
+ Type: schema.TypeString,
+ Optional: true,
+ Computed: true,
+ ExactlyOneOf: []string{"iteration_id", "iteration_fingerprint", "iteration_version"},
+ ValidateFunc: validation.StringIsNotEmpty,
+ },
+ "iteration_version": {
+ Description: "The incremental version of the iteration assigned to the channel.",
+ Type: schema.TypeInt,
+ Optional: true,
+ Computed: true,
+ ExactlyOneOf: []string{"iteration_id", "iteration_fingerprint", "iteration_version"},
+ ValidateFunc: validation.IntBetween(0, math.MaxInt32),
+ },
+ // Computed Values
+ "organization_id": {
+ Description: "The ID of the HCP organization where this channel is located. Always the same as the associated channel.",
+ Type: schema.TypeString,
+ Computed: true,
+ },
+ },
+ }
+}
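+
+// A minimal configuration sketch for this resource (the bucket, channel, and
+// fingerprint values below are illustrative placeholders, not part of this change):
+//
+//	resource "hcp_packer_channel_assignment" "example" {
+//	  bucket_name           = "alpine"
+//	  channel_name          = "staging"
+//	  iteration_fingerprint = "abc123"
+//	}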
+
+func resourcePackerChannelAssignmentRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ client := meta.(*clients.Client)
+ projectID, err := GetProjectID(d.Get("project_id").(string), client.Config.ProjectID)
+ if err != nil {
+ return diag.Errorf("unable to retrieve project ID: %v", err)
+ }
+
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: projectID,
+ }
+ if err := setLocationData(d, loc); err != nil {
+ return diag.FromErr(err)
+ }
+
+ bucketName := d.Get("bucket_name").(string)
+ channelName := d.Get("channel_name").(string)
+
+ channel, err := clients.GetPackerChannelBySlugFromList(ctx, client, loc, bucketName, channelName)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ if channel == nil {
+ d.SetId("")
+ return diag.Diagnostics{diag.Diagnostic{
+ Severity: diag.Error,
+ Summary: fmt.Sprintf("HCP Packer channel with (channel_name %q) (bucket_name %q) (project_id %q) not found.", channelName, bucketName, loc.ProjectID),
+ }}
+ }
+
+ if err := setPackerChannelAssignmentIterationData(d, channel.Iteration); err != nil {
+ return diag.FromErr(err)
+ }
+
+ return nil
+}
+
+func resourcePackerChannelAssignmentCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ client := meta.(*clients.Client)
+ projectID, err := GetProjectID(d.Get("project_id").(string), client.Config.ProjectID)
+ if err != nil {
+ return diag.Errorf("unable to retrieve project ID: %v", err)
+ }
+
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: projectID,
+ }
+ if err := setLocationData(d, loc); err != nil {
+ return diag.FromErr(err)
+ }
+
+ bucketName := d.Get("bucket_name").(string)
+ channelName := d.Get("channel_name").(string)
+
+ channel, err := clients.GetPackerChannelBySlugFromList(ctx, client, loc, bucketName, channelName)
+ if err != nil {
+ return diag.FromErr(err)
+ } else if channel == nil {
+ return diag.Diagnostics{diag.Diagnostic{
+ Severity: diag.Error,
+ Summary: fmt.Sprintf("HCP Packer channel with (channel_name %q) (bucket_name %q) (project_id %q) not found.", channelName, bucketName, loc.ProjectID),
+ }}
+ } else if channel.Managed {
+ return diag.Diagnostics{diag.Diagnostic{
+ Severity: diag.Error,
+ Summary: fmt.Sprintf("HCP Packer channel with (channel_name %q) (bucket_name %q) (project_id %q) is managed by HCP Packer and cannot have an iteration assigned by Terraform.", channelName, bucketName, loc.ProjectID),
+ }}
+ } else if iteration := channel.Iteration; iteration != nil && (iteration.IncrementalVersion > 0 || iteration.ID != "" || iteration.Fingerprint != "") {
+ return diag.Diagnostics{diag.Diagnostic{
+ Severity: diag.Error,
+ Summary: fmt.Sprintf("HCP Packer channel with (channel_name %q) (bucket_name %q) (project_id %q) already has an assigned iteration.", channelName, bucketName, loc.ProjectID),
+ Detail: "To adopt this resource into Terraform, use `terraform import`, or remove the channel's assigned iteration using the HCP Packer GUI/API.",
+ }}
+ }
+
+ iterID := d.Get("iteration_id").(string)
+ if iterID == unassignString {
+ iterID = ""
+ }
+
+ iterFingerprint := d.Get("iteration_fingerprint").(string)
+ if iterFingerprint == unassignString {
+ iterFingerprint = ""
+ }
+
+ updatedChannel, err := clients.UpdateBucketChannel(ctx, client, loc, bucketName, channelName,
+ &packermodels.HashicorpCloudPackerIteration{
+ IncrementalVersion: int32(d.Get("iteration_version").(int)),
+ ID: iterID,
+ Fingerprint: iterFingerprint,
+ }, nil,
+ )
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ d.SetId(updatedChannel.ID)
+
+ if err := setPackerChannelAssignmentIterationData(d, updatedChannel.Iteration); err != nil {
+ return diag.FromErr(err)
+ }
+
+ return nil
+}
+
+func resourcePackerChannelAssignmentUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ client := meta.(*clients.Client)
+ projectID, err := GetProjectID(d.Get("project_id").(string), client.Config.ProjectID)
+ if err != nil {
+ return diag.Errorf("unable to retrieve project ID: %v", err)
+ }
+
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: projectID,
+ }
+ if err := setLocationData(d, loc); err != nil {
+ return diag.FromErr(err)
+ }
+
+ bucketName := d.Get("bucket_name").(string)
+ channelName := d.Get("channel_name").(string)
+
+ iteration := &packermodels.HashicorpCloudPackerIteration{}
+ assignmentHasChanges := d.HasChanges("iteration_version", "iteration_id", "iteration_fingerprint")
+ if !assignmentHasChanges || d.HasChange("iteration_version") {
+ iteration.IncrementalVersion = int32(d.Get("iteration_version").(int))
+ }
+ if iterID := d.Get("iteration_id").(string); (!assignmentHasChanges || d.HasChange("iteration_id")) && iterID != unassignString {
+ iteration.ID = iterID
+ }
+	if iterFingerprint := d.Get("iteration_fingerprint").(string); (!assignmentHasChanges || d.HasChange("iteration_fingerprint")) && iterFingerprint != unassignString {
+		iteration.Fingerprint = iterFingerprint
+ }
+
+ updatedChannel, err := clients.UpdateBucketChannel(ctx, client, loc, bucketName, channelName, iteration, nil)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ if err := setPackerChannelAssignmentIterationData(d, updatedChannel.Iteration); err != nil {
+ return diag.FromErr(err)
+ }
+
+ return nil
+}
+
+func resourcePackerChannelAssignmentDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
+ client := meta.(*clients.Client)
+ projectID, err := GetProjectID(d.Get("project_id").(string), client.Config.ProjectID)
+ if err != nil {
+ return diag.Errorf("unable to retrieve project ID: %v", err)
+ }
+
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: projectID,
+ }
+
+ bucketName := d.Get("bucket_name").(string)
+ channelName := d.Get("channel_name").(string)
+
+ _, err = clients.UpdateBucketChannel(ctx, client, loc, bucketName, channelName, nil, nil)
+ if err != nil {
+ return diag.FromErr(err)
+ }
+
+ return nil
+}
+
+func resourcePackerChannelAssignmentImport(ctx context.Context, d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
+	// With multi-project support, the import ID format is dynamic:
+	// to import with an explicit project ID:
+	//   terraform import hcp_packer_channel_assignment.test {project_id}:{bucket_name}:{channel_name}
+	// to import using the provider's default project ID:
+	//   terraform import hcp_packer_channel_assignment.test {bucket_name}:{channel_name}
+
+ client := meta.(*clients.Client)
+ bucketName := ""
+ channelName := ""
+ projectID := ""
+ var err error
+ // Updates the source channel to include data about the module used.
+ client, err = client.UpdateSourceChannel(d)
+ if err != nil {
+ log.Printf("[DEBUG] Failed to update analytics with module name (%s)", err)
+ }
+
+ idParts := strings.SplitN(d.Id(), ":", 3)
+ if len(idParts) == 3 { // {project_id}:{bucket_name}:{channel_name}
+ if idParts[0] == "" || idParts[1] == "" || idParts[2] == "" {
+ return nil, fmt.Errorf("unexpected format of ID (%q), expected {project_id}:{bucket_name}:{channel_name}", d.Id())
+ }
+ projectID = idParts[0]
+ bucketName = idParts[1]
+ channelName = idParts[2]
+ } else if len(idParts) == 2 { // {bucket_name}:{channel_name}
+ if idParts[0] == "" || idParts[1] == "" {
+ return nil, fmt.Errorf("unexpected format of ID (%q), expected {bucket_name}:{channel_name}", d.Id())
+ }
+ projectID, err = GetProjectID(projectID, client.Config.ProjectID)
+ if err != nil {
+ return nil, fmt.Errorf("unable to retrieve project ID: %v", err)
+ }
+ bucketName = idParts[0]
+ channelName = idParts[1]
+ } else {
+ return nil, fmt.Errorf("unexpected format of ID (%q), expected {bucket_name}:{channel_name} or {project_id}:{bucket_name}:{channel_name}", d.Id())
+ }
+
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: projectID,
+ }
+ if err := setLocationData(d, loc); err != nil {
+ return nil, err
+ }
+
+ if err := d.Set("bucket_name", bucketName); err != nil {
+ return nil, err
+ }
+ if err := d.Set("channel_name", channelName); err != nil {
+ return nil, err
+ }
+
+ channel, err := clients.GetPackerChannelBySlugFromList(ctx, client, loc, bucketName, channelName)
+ if err != nil {
+ return nil, err
+ } else if channel == nil {
+ return nil, fmt.Errorf("HCP Packer channel with (channel_name %q) (bucket_name %q) (project_id %q) not found", channelName, bucketName, loc.ProjectID)
+ } else if channel.Managed {
+ return nil, fmt.Errorf("HCP Packer channel with (channel_name %q) (bucket_name %q) (project_id %q) is managed by HCP Packer and cannot have an iteration assigned by Terraform", channelName, bucketName, loc.ProjectID)
+ }
+
+ d.SetId(channel.ID)
+
+ if err := setPackerChannelAssignmentIterationData(d, channel.Iteration); err != nil {
+ return nil, err
+ }
+
+ return []*schema.ResourceData{d}, nil
+}
+
+func resourcePackerChannelAssignmentCustomizeDiff(ctx context.Context, d *schema.ResourceDiff, meta interface{}) error {
+ client := meta.(*clients.Client)
+ var err error
+ projectID, err := GetProjectID(d.Get("project_id").(string), client.Config.ProjectID)
+ if err != nil {
+ return fmt.Errorf("unable to retrieve project ID: %v", err)
+ }
+
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: projectID,
+ }
+
+ bucketName := d.Get("bucket_name").(string)
+
+ if (d.HasChange("iteration_id") && !d.NewValueKnown("iteration_id")) ||
+ (d.HasChange("iteration_fingerprint") && !d.NewValueKnown("iteration_fingerprint")) ||
+		(d.HasChange("iteration_version") && !d.NewValueKnown("iteration_version")) {
+ if err := d.SetNewComputed("iteration_id"); err != nil {
+ return err
+ }
+ if err := d.SetNewComputed("iteration_fingerprint"); err != nil {
+ return err
+ }
+ if err := d.SetNewComputed("iteration_version"); err != nil {
+ return err
+ }
+ } else {
+ var iteration *packermodels.HashicorpCloudPackerIteration
+ var itErr error
+
+ if rawID, ok := d.GetOk("iteration_id"); ok && d.HasChange("iteration_id") && d.NewValueKnown("iteration_id") {
+ if id := rawID.(string); id != unassignString {
+ iteration, itErr = clients.GetIterationFromID(ctx, client, loc, bucketName, id)
+ } else {
+ iteration = &packermodels.HashicorpCloudPackerIteration{}
+ }
+ } else if rawFingerprint, ok := d.GetOk("iteration_fingerprint"); ok && d.HasChange("iteration_fingerprint") && d.NewValueKnown("iteration_fingerprint") {
+ if fingerprint := rawFingerprint.(string); fingerprint != unassignString {
+ iteration, itErr = clients.GetIterationFromFingerprint(ctx, client, loc, bucketName, fingerprint)
+ } else {
+ iteration = &packermodels.HashicorpCloudPackerIteration{}
+ }
+ } else if rawVersion, ok := d.GetOk("iteration_version"); ok && d.HasChange("iteration_version") && d.NewValueKnown("iteration_version") {
+ if version := int32(rawVersion.(int)); version != 0 {
+ iteration, itErr = clients.GetIterationFromVersion(ctx, client, loc, bucketName, version)
+ } else {
+ iteration = &packermodels.HashicorpCloudPackerIteration{}
+ }
+ }
+
+ if itErr != nil {
+ return itErr
+ } else if iteration != nil {
+ if err := d.SetNew("iteration_version", iteration.IncrementalVersion); err != nil {
+ return err
+ }
+
+ id := iteration.ID
+ if id == "" {
+ id = unassignString
+ }
+ if err := d.SetNew("iteration_id", id); err != nil {
+ return err
+ }
+
+ fingerprint := iteration.Fingerprint
+ if fingerprint == "" {
+ fingerprint = unassignString
+ }
+ if err := d.SetNew("iteration_fingerprint", fingerprint); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func setPackerChannelAssignmentIterationData(d *schema.ResourceData, i *packermodels.HashicorpCloudPackerIteration) error {
+ var iteration packermodels.HashicorpCloudPackerIteration
+
+ if i == nil {
+ iteration = packermodels.HashicorpCloudPackerIteration{
+ IncrementalVersion: 0,
+ ID: "",
+ Fingerprint: "",
+ }
+ } else {
+ iteration = *i
+ }
+
+ if err := d.Set("iteration_version", iteration.IncrementalVersion); err != nil {
+ return err
+ }
+
+ id := iteration.ID
+ if id == "" {
+ id = unassignString
+ }
+ if err := d.Set("iteration_id", id); err != nil {
+ return err
+ }
+
+ fingerprint := iteration.Fingerprint
+ if fingerprint == "" {
+ fingerprint = unassignString
+ }
+ if err := d.Set("iteration_fingerprint", fingerprint); err != nil {
+ return err
+ }
+
+ return nil
+}
diff --git a/internal/provider/resource_packer_channel_assignment_test.go b/internal/provider/resource_packer_channel_assignment_test.go
new file mode 100644
index 000000000..3bb297a6a
--- /dev/null
+++ b/internal/provider/resource_packer_channel_assignment_test.go
@@ -0,0 +1,549 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "regexp"
+ "testing"
+
+ "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/models"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
+ "github.com/hashicorp/terraform-provider-hcp/internal/clients"
+)
+
+func TestAccPackerChannelAssignment_SimpleSetUnset(t *testing.T) {
+ bucketSlug := testAccCreateSlug("AssignmentSimpleSetUnset")
+ channelSlug := bucketSlug // No need for a different slug
+ iterationFingerprint := "1"
+
+ var iteration *models.HashicorpCloudPackerIteration
+
+ baseAssignment := testAccPackerAssignmentBuilderBase("SimpleSetUnset", fmt.Sprintf("%q", bucketSlug), fmt.Sprintf("%q", channelSlug))
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() {
+ testAccPreCheck(t, map[string]bool{"aws": false, "azure": false})
+ upsertRegistry(t)
+ upsertBucket(t, bucketSlug)
+ upsertChannel(t, bucketSlug, channelSlug, "")
+ iteration = upsertCompleteIteration(t, bucketSlug, iterationFingerprint)
+ },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(state *terraform.State) error {
+ if err := testAccCheckAssignmentDestroyed(baseAssignment.ResourceName())(state); err != nil {
+ t.Error(err)
+ }
+ deleteBucket(t, bucketSlug, true)
+ return nil
+ },
+ Steps: []resource.TestStep{
+ { // Set channel assignment to the iteration
+ Config: testConfig(testAccConfigBuildersToString(testAccPackerAssignmentBuilderFromAssignment(
+ baseAssignment,
+ ``, fmt.Sprintf("%q", iterationFingerprint), ``,
+ ))),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ testAccCheckAssignmentStateBucketAndChannelName(baseAssignment.ResourceName(), bucketSlug, channelSlug),
+ testAccCheckAssignmentStateMatchesIteration(baseAssignment.ResourceName(), &iteration),
+ testAccCheckAssignmentStateMatchesAPI(baseAssignment.ResourceName()),
+ ),
+ },
+ { // Validate importing channel assignments that are already set
+ ResourceName: baseAssignment.ResourceName(),
+ ImportState: true,
+ ImportStateId: fmt.Sprintf("%s:%s", bucketSlug, channelSlug),
+ ImportStateVerify: true,
+ },
+ { // Set channel assignment to null
+ Config: testConfig(testAccConfigBuildersToString(testAccPackerAssignmentBuilderFromAssignment(
+ baseAssignment,
+ ``, fmt.Sprintf("%q", unassignString), ``,
+ ))),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ testAccCheckAssignmentStateBucketAndChannelName(baseAssignment.ResourceName(), bucketSlug, channelSlug),
+ testAccCheckAssignmentStateMatchesIteration(baseAssignment.ResourceName(), nil),
+ testAccCheckAssignmentStateMatchesAPI(baseAssignment.ResourceName()),
+ ),
+ },
+ { // Validate importing channel assignments that are null
+ ResourceName: baseAssignment.ResourceName(),
+ ImportState: true,
+ ImportStateId: fmt.Sprintf("%s:%s", bucketSlug, channelSlug),
+ ImportStateVerify: true,
+ },
+ },
+ })
+}
+
+func TestAccPackerChannelAssignment_AssignLatest(t *testing.T) {
+ bucketSlug := testAccCreateSlug("AssignmentAssignLatest")
+ channelSlug := bucketSlug // No need for a different slug
+ uniqueName := "AssignLatest"
+
+ // This config creates a data source that is read before apply time
+ beforeIteration := testAccPackerDataIterationBuilder(
+ uniqueName,
+ fmt.Sprintf("%q", bucketSlug),
+ `"latest"`,
+ )
+ beforeChannel := testAccPackerChannelBuilder(
+ uniqueName,
+ fmt.Sprintf("%q", channelSlug),
+ beforeIteration.AttributeRef("bucket_name"),
+ )
+ beforeAssignment := testAccPackerAssignmentBuilderWithChannelReference(
+ uniqueName,
+ beforeChannel,
+ beforeIteration.AttributeRef("id"), ``, ``,
+ )
+
+ // This config creates a data source that is read after apply time,
+ // which is important for testing that CustomizeDiff doesn't cause errors
+ afterChannel := testAccPackerChannelBuilder(
+ uniqueName,
+ fmt.Sprintf("%q", channelSlug),
+ fmt.Sprintf("%q", bucketSlug),
+ )
+ afterIteration := testAccPackerDataIterationBuilder(
+ uniqueName,
+ afterChannel.AttributeRef("bucket_name"),
+ `"latest"`,
+ )
+ afterAssignment := testAccPackerAssignmentBuilderWithChannelReference(
+ uniqueName,
+ afterChannel,
+ afterIteration.AttributeRef("id"), ``, ``,
+ )
+
+ var iteration *models.HashicorpCloudPackerIteration
+
+ generateStep := func(iterationData, channelResource, assignmentResource testAccConfigBuilderInterface) resource.TestStep {
+ return resource.TestStep{
+ Config: testConfig(testAccConfigBuildersToString(iterationData, channelResource, assignmentResource)),
+ Check: resource.ComposeAggregateTestCheckFunc(
+ testAccCheckAssignmentStateBucketAndChannelName(assignmentResource.ResourceName(), bucketSlug, channelSlug),
+ testAccCheckAssignmentStateMatchesIteration(assignmentResource.ResourceName(), &iteration),
+ testAccCheckAssignmentStateMatchesChannelState(assignmentResource.ResourceName(), channelResource.ResourceName()),
+ testAccCheckAssignmentStateMatchesAPI(assignmentResource.ResourceName()),
+ ),
+ }
+ }
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() {
+ testAccPreCheck(t, map[string]bool{"aws": false, "azure": false})
+ upsertRegistry(t)
+ upsertBucket(t, bucketSlug)
+ iteration = upsertCompleteIteration(t, bucketSlug, "abc")
+ },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(state *terraform.State) error {
+ deleteBucket(t, bucketSlug, true)
+ return nil
+ },
+ Steps: []resource.TestStep{
+ generateStep(beforeIteration, beforeChannel, beforeAssignment),
+ { // Remove any resources and data sources completely
+ Config: testConfig(""),
+ },
+ generateStep(afterIteration, afterChannel, afterAssignment),
+ },
+ })
+}
+
+func TestAccPackerChannelAssignment_InvalidInputs(t *testing.T) {
+ bucketSlug := testAccCreateSlug("AssignmentInvalidInputs")
+ channelSlug := bucketSlug // No need for a different slug
+
+ generateStep := func(iterID string, iterFingerprint string, iterVersion string, errorRegex string) resource.TestStep {
+ return resource.TestStep{
+ Config: testConfig(testAccConfigBuildersToString(testAccPackerAssignmentBuilder(
+ "InvalidInputs",
+ fmt.Sprintf("%q", bucketSlug),
+ fmt.Sprintf("%q", channelSlug),
+ iterID, iterFingerprint, iterVersion,
+ ))),
+ ExpectError: regexp.MustCompile(errorRegex),
+ }
+ }
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() {
+ testAccPreCheck(t, map[string]bool{"aws": false, "azure": false})
+ upsertRegistry(t)
+ upsertBucket(t, bucketSlug)
+ upsertChannel(t, bucketSlug, channelSlug, "")
+ },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(state *terraform.State) error {
+ deleteBucket(t, bucketSlug, true)
+ return nil
+ },
+ Steps: []resource.TestStep{
+ generateStep(
+ `""`, ``, ``,
+ `.*expected "iteration_id" to not be an empty string.*`,
+ ),
+ generateStep(
+ ``, `""`, ``,
+ `.*expected "iteration_fingerprint" to not be an empty string.*`,
+ ),
+ generateStep(
+ `"abcd"`, `"efgh"`, ``,
+ `.*only one of.*\n.*can be specified.*`,
+ ),
+ generateStep(
+ `"1234"`, ``, `5678`,
+ `.*only one of.*\n.*can be specified.*`,
+ ),
+ generateStep(
+ ``, `"jamesBond"`, `007`,
+ `.*only one of.*\n.*can be specified.*`,
+ ),
+ generateStep(
+ `"doesNotExist"`, ``, ``,
+ `The iteration with identifier.*does not exist`,
+ ),
+ generateStep(
+ ``, `"alsoDoesNotExist"`, ``,
+ `The iteration with identifier.*does not exist`,
+ ),
+ generateStep(
+ ``, ``, `99`,
+ `The iteration with identifier.*does not exist`,
+ ),
+ },
+ })
+}
+
+func TestAccPackerChannelAssignment_CreateFailsWhenPreassigned(t *testing.T) {
+ bucketSlug := testAccCreateSlug("AssignmentCreateFailPreassign")
+ channelSlug := bucketSlug // No need for a different slug
+ iterationFingerprint := "1"
+
+ channel := testAccPackerChannelBuilder(
+ channelSlug,
+ fmt.Sprintf("%q", channelSlug),
+ fmt.Sprintf("%q", bucketSlug),
+ )
+
+ assignment := testAccPackerAssignmentBuilderWithChannelReference(
+ "CreateFailsWhenPreassigned",
+ channel,
+ ``, ``, `0`,
+ )
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() {
+ testAccPreCheck(t, map[string]bool{"aws": false, "azure": false})
+ upsertRegistry(t)
+ upsertBucket(t, bucketSlug)
+ upsertCompleteIteration(t, bucketSlug, iterationFingerprint)
+ },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(state *terraform.State) error {
+ deleteBucket(t, bucketSlug, true)
+ return nil
+ },
+ Steps: []resource.TestStep{
+ {
+ Config: testConfig(testAccConfigBuildersToString(channel)),
+ },
+ {
+ PreConfig: func() {
+ updateChannelAssignment(t,
+ bucketSlug,
+ channelSlug,
+ &models.HashicorpCloudPackerIteration{Fingerprint: iterationFingerprint},
+ )
+ },
+ Config: testConfig(testAccConfigBuildersToString(channel, assignment)),
+ ExpectError: regexp.MustCompile(".*channel with.*already has an assigned iteration.*"),
+ },
+ },
+ })
+}
+
+func TestAccPackerChannelAssignment_HCPManagedChannelErrors(t *testing.T) {
+ bucketSlug := testAccCreateSlug("AssignmentHCPManaged")
+ channelSlug := "latest"
+
+ assignment := testAccPackerAssignmentBuilder(
+ "HCPManagedChannelErrors",
+ fmt.Sprintf("%q", bucketSlug),
+ fmt.Sprintf("%q", channelSlug),
+ ``, ``, `0`,
+ )
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() {
+ testAccPreCheck(t, map[string]bool{"aws": false, "azure": false})
+ upsertRegistry(t)
+ upsertBucket(t, bucketSlug)
+ },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(state *terraform.State) error {
+ deleteBucket(t, bucketSlug, true)
+ return nil
+ },
+ Steps: []resource.TestStep{
+ {
+ Config: testConfig(testAccConfigBuildersToString(assignment)),
+ ExpectError: regexp.MustCompile(".*channel with.*is managed by HCP Packer.*"),
+ },
+ {
+ ResourceName: assignment.ResourceName(),
+ ImportState: true,
+ ImportStateId: fmt.Sprintf("%s:%s", bucketSlug, channelSlug),
+ ExpectError: regexp.MustCompile(".*channel with.*is managed by HCP Packer.*"),
+ },
+ },
+ })
+}
+
+// Test that all attributes generate and successfully apply plans to fix
+// the assignment when it is changed OOB from null to a non-null iteration
+func TestAccPackerChannelAssignment_EnforceNull(t *testing.T) {
+ bucketSlug := testAccCreateSlug("AssignmentEnforceNull")
+ channelSlug := bucketSlug // No need for a different slug
+
+ channel := testAccPackerChannelBuilder(channelSlug,
+ fmt.Sprintf("%q", channelSlug),
+ fmt.Sprintf("%q", bucketSlug),
+ )
+
+ var iteration1 *models.HashicorpCloudPackerIteration
+ var iteration2 *models.HashicorpCloudPackerIteration
+
+ baseAssignment := testAccPackerAssignmentBuilderBaseWithChannelReference("EnforceNull", channel)
+
+ generateEnforceNullCheckSteps := func(iterID string, iterFingerprint string, iterVersion string) []resource.TestStep {
+ assignment := testAccPackerAssignmentBuilderFromAssignment(
+ baseAssignment,
+ iterID, iterFingerprint, iterVersion,
+ )
+
+ config := testConfig(testAccConfigBuildersToString(channel, assignment))
+
+ checks := resource.ComposeAggregateTestCheckFunc(
+ testAccCheckAssignmentStateBucketAndChannelName(assignment.ResourceName(), bucketSlug, channelSlug),
+ testAccCheckAssignmentStateMatchesIteration(assignment.ResourceName(), nil),
+ testAccCheckAssignmentStateMatchesChannelState(assignment.ResourceName(), channel.ResourceName()),
+ testAccCheckAssignmentStateMatchesAPI(assignment.ResourceName()),
+ )
+
+ return []resource.TestStep{
+ { // Set up channel and set the assignment using Terraform
+ // This should be a no-op unless it is the first step, where the channel and null assignment are
+ // initially created. However, this step is included every time to make sure we've applied the
+ // assignment to the state at least once before checking if it is properly enforced against OOB changes.
+ Config: config,
+ Check: checks,
+ },
+ { // Change assignment OOB, then test with assignment set by Terraform
+ PreConfig: func() {
+ updateChannelAssignment(t, bucketSlug, channelSlug, &models.HashicorpCloudPackerIteration{ID: iteration1.ID})
+ updateChannelAssignment(t, bucketSlug, channelSlug, &models.HashicorpCloudPackerIteration{ID: iteration2.ID})
+ },
+ Config: config,
+ Check: checks,
+ },
+ }
+ }
+
+ var generatedSteps []resource.TestStep
+ // Add null ID steps
+ generatedSteps = append(generatedSteps, generateEnforceNullCheckSteps(fmt.Sprintf("%q", unassignString), ``, ``)...)
+ // Add null Fingerprint steps
+ generatedSteps = append(generatedSteps, generateEnforceNullCheckSteps(``, fmt.Sprintf("%q", unassignString), ``)...)
+ // Add null Version steps
+ generatedSteps = append(generatedSteps, generateEnforceNullCheckSteps(``, ``, `0`)...)
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() {
+ testAccPreCheck(t, map[string]bool{"aws": false, "azure": false})
+ upsertRegistry(t)
+ upsertBucket(t, bucketSlug)
+ // Pushing two iterations so that we can also implicitly verify that
+ // nullifying the assignment doesn't actually result in a rollback to iteration1
+ iteration1 = upsertCompleteIteration(t, bucketSlug, "1")
+ iteration2 = upsertCompleteIteration(t, bucketSlug, "2")
+ },
+ ProviderFactories: providerFactories,
+ CheckDestroy: func(state *terraform.State) error {
+ deleteBucket(t, bucketSlug, true)
+ return nil
+ },
+ Steps: generatedSteps,
+ })
+}
+
+func testAccPackerDataIterationBuilder(uniqueName string, bucketName string, channelName string) testAccConfigBuilderInterface {
+ return &testAccConfigBuilder{
+ isData: true,
+ resourceType: "hcp_packer_iteration",
+ uniqueName: uniqueName,
+ attributes: map[string]string{
+ "bucket_name": bucketName,
+ "channel": channelName,
+ },
+ }
+}
+
+func testAccPackerChannelBuilder(uniqueName string, channelName string, bucketName string) testAccConfigBuilderInterface {
+ return &testAccConfigBuilder{
+ resourceType: "hcp_packer_channel",
+ uniqueName: uniqueName,
+ attributes: map[string]string{
+ "name": channelName,
+ "bucket_name": bucketName,
+ },
+ }
+}
+
+// An AssignmentBuilder without any iteration fields set.
+// To be used downstream by other assignments to ensure core settings aren't changed.
+func testAccPackerAssignmentBuilderBase(uniqueName string, bucketName string, channelName string) testAccConfigBuilderInterface {
+ return testAccPackerAssignmentBuilder(
+ uniqueName,
+ bucketName,
+ channelName,
+ ``, ``, ``,
+ )
+}
+
+func testAccPackerAssignmentBuilderBaseWithChannelReference(uniqueName string, channel testAccConfigBuilderInterface) testAccConfigBuilderInterface {
+ return testAccPackerAssignmentBuilderBase(
+ uniqueName,
+ channel.AttributeRef("bucket_name"),
+ channel.AttributeRef("name"),
+ )
+}
+
+func testAccPackerAssignmentBuilder(uniqueName string, bucketName string, channelName string, iterID string, iterFingerprint string, iterVersion string) testAccConfigBuilderInterface {
+ return &testAccConfigBuilder{
+ resourceType: "hcp_packer_channel_assignment",
+ uniqueName: uniqueName,
+ attributes: map[string]string{
+ "bucket_name": bucketName,
+ "channel_name": channelName,
+ "iteration_id": iterID,
+ "iteration_fingerprint": iterFingerprint,
+ "iteration_version": iterVersion,
+ },
+ }
+}
+
+func testAccPackerAssignmentBuilderFromAssignment(oldAssignment testAccConfigBuilderInterface, iterID string, iterFingerprint string, iterVersion string) testAccConfigBuilderInterface {
+ return testAccPackerAssignmentBuilder(
+ oldAssignment.UniqueName(),
+ oldAssignment.Attributes()["bucket_name"],
+ oldAssignment.Attributes()["channel_name"],
+ iterID,
+ iterFingerprint,
+ iterVersion,
+ )
+}
+
+func testAccPackerAssignmentBuilderWithChannelReference(uniqueName string, channel testAccConfigBuilderInterface, iterID string, iterFingerprint string, iterVersion string) testAccConfigBuilderInterface {
+ return testAccPackerAssignmentBuilder(
+ uniqueName,
+ channel.AttributeRef("bucket_name"),
+ channel.AttributeRef("name"),
+ iterID, iterFingerprint, iterVersion,
+ )
+}
+
+func testAccCheckAssignmentStateBucketAndChannelName(resourceName string, bucketName string, channelName string) resource.TestCheckFunc {
+ return resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(resourceName, "bucket_name", bucketName),
+ resource.TestCheckResourceAttr(resourceName, "channel_name", channelName),
+ )
+}
+
+func testAccCheckAssignmentStateMatchesChannelState(assignmentResourceName string, channelResourceName string) resource.TestCheckFunc {
+ return resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttrPair(assignmentResourceName, "organization_id", channelResourceName, "organization_id"),
+ resource.TestCheckResourceAttrPair(assignmentResourceName, "project_id", channelResourceName, "project_id"),
+ resource.TestCheckResourceAttrPair(assignmentResourceName, "bucket_name", channelResourceName, "bucket_name"),
+ resource.TestCheckResourceAttrPair(assignmentResourceName, "channel_name", channelResourceName, "name"),
+ )
+}
+
+func testAccCheckAssignmentStateMatchesIteration(resourceName string, iterationPtr **models.HashicorpCloudPackerIteration) resource.TestCheckFunc {
+ return func(state *terraform.State) error {
+ var iteration *models.HashicorpCloudPackerIteration
+ if iterationPtr != nil {
+ iteration = *iterationPtr
+ }
+
+ if iteration == nil {
+ iteration = &models.HashicorpCloudPackerIteration{}
+ }
+
+ iterID := iteration.ID
+ if iterID == "" {
+ iterID = unassignString
+ }
+
+ iterFingerprint := iteration.Fingerprint
+ if iterFingerprint == "" {
+ iterFingerprint = unassignString
+ }
+
+ return resource.ComposeAggregateTestCheckFunc(
+ resource.TestCheckResourceAttr(resourceName, "iteration_id", iterID),
+ resource.TestCheckResourceAttr(resourceName, "iteration_fingerprint", iterFingerprint),
+ resource.TestCheckResourceAttr(resourceName, "iteration_version", fmt.Sprintf("%d", iteration.IncrementalVersion)),
+ )(state)
+ }
+}
+
+func testAccPullIterationFromAPIWithAssignmentState(resourceName string, state *terraform.State) (*models.HashicorpCloudPackerIteration, error) {
+ client := testAccProvider.Meta().(*clients.Client)
+
+ loc, _ := testAccGetLocationFromState(resourceName, state)
+
+ bucketName, err := testAccGetAttributeFromResourceInState(resourceName, "bucket_name", state)
+ if err != nil {
+ return nil, err
+ }
+ channelName, err := testAccGetAttributeFromResourceInState(resourceName, "channel_name", state)
+ if err != nil {
+ return nil, err
+ }
+
+ channel, err := clients.GetPackerChannelBySlug(context.Background(), client, loc, *bucketName, *channelName)
+ if err != nil {
+ return nil, err
+ }
+
+ return channel.Iteration, nil
+}
+
+func testAccCheckAssignmentStateMatchesAPI(resourceName string) resource.TestCheckFunc {
+ return func(state *terraform.State) error {
+ iteration, err := testAccPullIterationFromAPIWithAssignmentState(resourceName, state)
+ if err != nil {
+ return err
+ }
+ return testAccCheckAssignmentStateMatchesIteration(resourceName, &iteration)(state)
+ }
+}
+
+func testAccCheckAssignmentDestroyed(resourceName string) resource.TestCheckFunc {
+ return func(state *terraform.State) error {
+ iteration, err := testAccPullIterationFromAPIWithAssignmentState(resourceName, state)
+ if err != nil {
+ return fmt.Errorf("Unexpected error while validating channel assignment destruction. Got %v", err)
+ } else if iteration != nil && (iteration.ID != "" || iteration.Fingerprint != "" || iteration.IncrementalVersion != 0) {
+ return fmt.Errorf("Resource %q not properly destroyed", resourceName)
+ }
+
+ return nil
+ }
+}
diff --git a/internal/provider/statuspage.go b/internal/provider/statuspage.go
new file mode 100644
index 000000000..07a7e42ae
--- /dev/null
+++ b/internal/provider/statuspage.go
@@ -0,0 +1,127 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package provider
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+)
+
+// Status endpoint for prod.
+const statuspageURL = "https://status.hashicorp.com/api/v2/components.json"
+
+var hcpComponentIds = map[string]string{
+ "0q55nwmxngkc": "HCP API",
+ "sxffkgfb4fhb": "HCP Consul",
+ "0mbkqnrzg33w": "HCP Packer",
+ "mgv1p2j9x444": "HCP Portal",
+ "mb7xrbx9gjnq": "HCP Vault",
+}
+
+type statuspage struct {
+ Components []component `json:"components"`
+}
+
+type component struct {
+ ID string `json:"id"`
+ Status status `json:"status"`
+}
+
+type status string
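+
+// For reference, a trimmed sketch of the JSON this code expects from the status
+// page (illustrative, not the full response schema):
+//
+//	{"components": [{"id": "0q55nwmxngkc", "status": "operational"}, ...]}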
+
+func isHCPOperational() (diags diag.Diagnostics) {
+ req, err := http.NewRequest("GET", statuspageURL, nil)
+ if err != nil {
+ diags = append(diags, diag.Diagnostic{
+ Severity: diag.Warning,
+ Summary: "You may experience issues using HCP.",
+ Detail: fmt.Sprintf("Unable to create request to verify HCP status: %s", err),
+ })
+
+ return diags
+ }
+
+ var cl = http.Client{}
+ resp, err := cl.Do(req)
+ if err != nil {
+ diags = append(diags, diag.Diagnostic{
+ Severity: diag.Warning,
+ Summary: "You may experience issues using HCP.",
+ Detail: fmt.Sprintf("Unable to complete request to verify HCP status: %s", err),
+ })
+
+ return diags
+ }
+ defer resp.Body.Close()
+
+ jsBytes, err := io.ReadAll(resp.Body)
+ if err != nil {
+ diags = append(diags, diag.Diagnostic{
+ Severity: diag.Warning,
+ Summary: "You may experience issues using HCP.",
+			Detail:   fmt.Sprintf("Unable to read response to verify HCP status: %s", err),
+ })
+
+ return diags
+ }
+
+ sp := statuspage{}
+ err = json.Unmarshal(jsBytes, &sp)
+ if err != nil {
+ diags = append(diags, diag.Diagnostic{
+ Severity: diag.Warning,
+ Summary: "You may experience issues using HCP.",
+			Detail:   fmt.Sprintf("Unable to unmarshal response to verify HCP status: %s", err),
+ })
+
+ return diags
+ }
+
+	// Translate the status page component IDs into a map of component name to operational status.
+ var systemStatus = map[string]status{}
+
+ for _, c := range sp.Components {
+ name, ok := hcpComponentIds[c.ID]
+ if ok {
+ systemStatus[name] = c.Status
+ }
+ }
+
+ operational := true
+ for _, st := range systemStatus {
+ if st != "operational" {
+ operational = false
+ }
+ }
+
+ if !operational {
+ diags = append(diags, diag.Diagnostic{
+ Severity: diag.Warning,
+ Summary: "You may experience issues using HCP.",
+ Detail: fmt.Sprintf("HCP is reporting the following:\n\n%v\nPlease check https://status.hashicorp.com for more details.", printStatus(systemStatus)),
+ })
+ }
+
+ return diags
+}
+
+func printStatus(m map[string]status) string {
+ var maxLenKey int
+ for k := range m {
+ if len(k) > maxLenKey {
+ maxLenKey = len(k)
+ }
+ }
+
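+	// Pad each line so the status values line up in a column,
+	// e.g. "HCP Packer:      operational".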
+ pr := ""
+ for k, v := range m {
+ pr += fmt.Sprintf("%s:%*s %s\n", k, 5+(maxLenKey-len(k)), " ", v)
+ }
+
+ return pr
+}
diff --git a/internal/provider/test_helpers.go b/internal/provider/test_helpers.go
deleted file mode 100644
index 48a1a6c85..000000000
--- a/internal/provider/test_helpers.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: MPL-2.0
-
-package provider
-
-import (
- "fmt"
- "strings"
-
- "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
- "github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
-)
-
-func testAccCheckFullURL(name, key, port string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[name]
- if !ok {
- return fmt.Errorf("not found: %s", name)
- }
-
- ep := rs.Primary.Attributes[key]
-
- if !strings.HasPrefix(ep, "https://") {
- return fmt.Errorf("URL missing scheme")
- }
-
- if port != "" {
- if !strings.HasSuffix(ep, fmt.Sprintf(":%s", port)) {
- return fmt.Errorf("URL missing port")
- }
- }
-
- return nil
- }
-}
diff --git a/internal/provider/test_helpers_packer_test.go b/internal/provider/test_helpers_packer_test.go
new file mode 100644
index 000000000..3da93b971
--- /dev/null
+++ b/internal/provider/test_helpers_packer_test.go
@@ -0,0 +1,480 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package provider
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "testing"
+ "time"
+
+ "github.com/cenkalti/backoff"
+ "github.com/go-openapi/strfmt"
+ "github.com/google/uuid"
+ "github.com/hashicorp/hcp-sdk-go/clients/cloud-operation/stable/2020-05-05/client/operation_service"
+ "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/client/packer_service"
+ "github.com/hashicorp/hcp-sdk-go/clients/cloud-packer-service/stable/2021-04-30/models"
+ sharedmodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-shared/v1/models"
+ "github.com/hashicorp/terraform-provider-hcp/internal/clients"
+ "google.golang.org/grpc/codes"
+)
+
+func upsertRegistry(t *testing.T) {
+ t.Helper()
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ params := packer_service.NewPackerServiceCreateRegistryParams()
+ params.LocationOrganizationID = loc.OrganizationID
+ params.LocationProjectID = loc.ProjectID
+ featureTier := models.HashicorpCloudPackerRegistryConfigTierPLUS
+ params.Body = packer_service.PackerServiceCreateRegistryBody{
+ FeatureTier: &featureTier,
+ }
+
+ resp, err := client.Packer.PackerServiceCreateRegistry(params, nil)
+ if err, ok := err.(*packer_service.PackerServiceCreateRegistryDefault); ok {
+ switch err.Code() {
+ case int(codes.AlreadyExists), http.StatusConflict:
+ getParams := packer_service.NewPackerServiceGetRegistryParams()
+ getParams.LocationOrganizationID = loc.OrganizationID
+ getParams.LocationProjectID = loc.ProjectID
+ getResp, err := client.Packer.PackerServiceGetRegistry(getParams, nil)
+ if err != nil {
+ t.Errorf("unexpected GetRegistry error: %v", err)
+ return
+ }
+ if *getResp.Payload.Registry.Config.FeatureTier != models.HashicorpCloudPackerRegistryConfigTierPLUS {
+				// Make sure it is a Plus-tier registry
+ params := packer_service.NewPackerServiceUpdateRegistryParams()
+ params.LocationOrganizationID = loc.OrganizationID
+ params.LocationProjectID = loc.ProjectID
+ featureTier := models.HashicorpCloudPackerRegistryConfigTierPLUS
+ params.Body = packer_service.PackerServiceUpdateRegistryBody{
+ FeatureTier: &featureTier,
+ }
+ resp, err := client.Packer.PackerServiceUpdateRegistry(params, nil)
+ if err != nil {
+ t.Errorf("unexpected UpdateRegistry error: %v", err)
+ return
+ }
+ waitForOperation(t, loc, "Reactivate Registry", resp.Payload.Operation.ID, client)
+ }
+ return
+ default:
+ t.Errorf("unexpected CreateRegistry error, expected nil or 409. Got code: %d err: %v", err.Code(), err)
+ return
+ }
+ }
+
+ waitForOperation(t, loc, "Create Registry", resp.Payload.Operation.ID, client)
+}
+
+func waitForOperation(
+ t *testing.T,
+ loc *sharedmodels.HashicorpCloudLocationLocation,
+ operationName string,
+ operationID string,
+ client *clients.Client,
+) {
+ timeout := "5s"
+ params := operation_service.NewWaitParams()
+ params.ID = operationID
+ params.Timeout = &timeout
+ params.LocationOrganizationID = loc.OrganizationID
+ params.LocationProjectID = loc.ProjectID
+
+ operation := func() error {
+ resp, err := client.Operation.Wait(params, nil)
+ if err != nil {
+ t.Errorf("unexpected error %#v", err)
+ }
+
+ if resp.Payload.Operation.Error != nil {
+ t.Errorf("Operation failed: %s", resp.Payload.Operation.Error.Message)
+ }
+
+ switch *resp.Payload.Operation.State {
+ case sharedmodels.HashicorpCloudOperationOperationStatePENDING:
+			return fmt.Errorf("==> Operation %q pending...", operationName)
+ case sharedmodels.HashicorpCloudOperationOperationStateRUNNING:
+			return fmt.Errorf("==> Operation %q running...", operationName)
+ case sharedmodels.HashicorpCloudOperationOperationStateDONE:
+ default:
+ t.Errorf("Operation returned unknown state: %s", *resp.Payload.Operation.State)
+ }
+ return nil
+ }
+
+ bo := backoff.NewExponentialBackOff()
+ bo.InitialInterval = 10 * time.Second
+ bo.RandomizationFactor = 0.5
+ bo.Multiplier = 1.5
+ bo.MaxInterval = 30 * time.Second
+ bo.MaxElapsedTime = 40 * time.Minute
+ err := backoff.Retry(operation, bo)
+ if err != nil {
+ t.Errorf("unexpected error: %#v", err)
+ }
+}
+
+func upsertBucket(t *testing.T, bucketSlug string) {
+ t.Helper()
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ createBktParams := packer_service.NewPackerServiceCreateBucketParams()
+ createBktParams.LocationOrganizationID = loc.OrganizationID
+ createBktParams.LocationProjectID = loc.ProjectID
+ createBktParams.Body = packer_service.PackerServiceCreateBucketBody{
+ BucketSlug: bucketSlug,
+ }
+ _, err := client.Packer.PackerServiceCreateBucket(createBktParams, nil)
+ if err == nil {
+ return
+ }
+ if err, ok := err.(*packer_service.PackerServiceCreateBucketDefault); ok {
+ switch err.Code() {
+ case int(codes.AlreadyExists), http.StatusConflict:
+			// all good here!
+ return
+ }
+ }
+
+ t.Errorf("unexpected CreateBucket error, expected nil or 409. Got %v", err)
+}
+
+func upsertIteration(t *testing.T, bucketSlug, fingerprint string) *models.HashicorpCloudPackerIteration {
+ t.Helper()
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ createItParams := packer_service.NewPackerServiceCreateIterationParams()
+ createItParams.LocationOrganizationID = loc.OrganizationID
+ createItParams.LocationProjectID = loc.ProjectID
+ createItParams.BucketSlug = bucketSlug
+ createItParams.Body = packer_service.PackerServiceCreateIterationBody{
+ Fingerprint: fingerprint,
+ }
+
+ iterationResp, err := client.Packer.PackerServiceCreateIteration(createItParams, nil)
+ if err == nil {
+ return iterationResp.Payload.Iteration
+ } else if err, ok := err.(*packer_service.PackerServiceCreateIterationDefault); ok {
+ switch err.Code() {
+ case int(codes.AlreadyExists), http.StatusConflict:
+			// all good here!
+ getItParams := packer_service.NewPackerServiceGetIterationParams()
+ getItParams.LocationOrganizationID = createItParams.LocationOrganizationID
+ getItParams.LocationProjectID = createItParams.LocationProjectID
+ getItParams.BucketSlug = createItParams.BucketSlug
+ getItParams.Fingerprint = &createItParams.Body.Fingerprint
+ iterationResp, err := client.Packer.PackerServiceGetIteration(getItParams, nil)
+ if err != nil {
+ t.Errorf("unexpected GetIteration error, expected nil. Got %v", err)
+ return nil
+ }
+ return iterationResp.Payload.Iteration
+ }
+ }
+
+ t.Errorf("unexpected CreateIteration error, expected nil or 409. Got %v", err)
+ return nil
+}
+
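+// upsertCompleteIteration creates (or reuses) an iteration with the given fingerprint,
+// attaches a finished build to it, and then re-reads the iteration so the returned
+// value includes the incremental version assigned by the registry.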
+func upsertCompleteIteration(t *testing.T, bucketSlug, fingerprint string) *models.HashicorpCloudPackerIteration {
+ iteration := upsertIteration(t, bucketSlug, fingerprint)
+ if t.Failed() || iteration == nil {
+ return nil
+ }
+ upsertBuild(t, bucketSlug, iteration.Fingerprint, iteration.ID)
+ if t.Failed() {
+ return nil
+ }
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+ iteration, err := clients.GetIterationFromFingerprint(context.Background(), client, loc, bucketSlug, iteration.Fingerprint)
+ if err != nil {
+ t.Errorf("Complete iteration not found after upserting, received unexpected error. Got %v", err)
+ return nil
+ }
+
+ return iteration
+}
+
+func revokeIteration(t *testing.T, iterationID, bucketSlug string, revokeAt strfmt.DateTime) {
+ t.Helper()
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ params := packer_service.NewPackerServiceUpdateIterationParams()
+ params.LocationOrganizationID = loc.OrganizationID
+ params.LocationProjectID = loc.ProjectID
+ params.IterationID = iterationID
+ params.Body = packer_service.PackerServiceUpdateIterationBody{
+ BucketSlug: bucketSlug,
+ RevokeAt: revokeAt,
+ }
+
+ _, err := client.Packer.PackerServiceUpdateIteration(params, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+func getIterationIDFromFingerPrint(t *testing.T, bucketSlug string, fingerprint string) (string, error) {
+ t.Helper()
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ getItParams := packer_service.NewPackerServiceGetIterationParams()
+ getItParams.LocationOrganizationID = loc.OrganizationID
+ getItParams.LocationProjectID = loc.ProjectID
+ getItParams.BucketSlug = bucketSlug
+ getItParams.Fingerprint = &fingerprint
+
+ ok, err := client.Packer.PackerServiceGetIteration(getItParams, nil)
+ if err != nil {
+ return "", err
+ }
+ return ok.Payload.Iteration.ID, nil
+}
+
+func upsertBuild(t *testing.T, bucketSlug, fingerprint, iterationID string) {
+ client := testAccProvider.Meta().(*clients.Client)
+
+ createBuildParams := packer_service.NewPackerServiceCreateBuildParams()
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+ createBuildParams.LocationOrganizationID = loc.OrganizationID
+ createBuildParams.LocationProjectID = loc.ProjectID
+ createBuildParams.BucketSlug = bucketSlug
+ createBuildParams.IterationID = iterationID
+
+ status := models.HashicorpCloudPackerBuildStatusRUNNING
+ createBuildParams.Body = packer_service.PackerServiceCreateBuildBody{
+ Build: &models.HashicorpCloudPackerBuildCreateBody{
+ CloudProvider: "aws",
+ ComponentType: "amazon-ebs.example",
+ PackerRunUUID: uuid.New().String(),
+ Status: &status,
+ },
+ Fingerprint: fingerprint,
+ }
+
+ build, err := client.Packer.PackerServiceCreateBuild(createBuildParams, nil)
+ if err, ok := err.(*packer_service.PackerServiceCreateBuildDefault); ok {
+ switch err.Code() {
+ case int(codes.Aborted), http.StatusConflict:
+			// all good here!
+ return
+ }
+ }
+
+ if build == nil {
+ t.Errorf("unexpected CreateBuild error, expected non nil build response. Got %v", err)
+ return
+ }
+
+ // Iterations are currently only assigned an incremental version when publishing image metadata on update.
+ // Incremental versions are a requirement for assigning the channel.
+ updateBuildParams := packer_service.NewPackerServiceUpdateBuildParams()
+ updateBuildParams.LocationOrganizationID = loc.OrganizationID
+ updateBuildParams.LocationProjectID = loc.ProjectID
+ updateBuildParams.BuildID = build.Payload.Build.ID
+ updatesStatus := models.HashicorpCloudPackerBuildStatusDONE
+ updateBuildParams.Body = packer_service.PackerServiceUpdateBuildBody{
+ Updates: &models.HashicorpCloudPackerBuildUpdates{
+ Status: &updatesStatus,
+ Images: []*models.HashicorpCloudPackerImageCreateBody{
+ {
+ ImageID: "ami-42",
+ Region: "us-east-1",
+ },
+ {
+ ImageID: "ami-43",
+ Region: "us-east-2",
+ },
+ },
+ Labels: map[string]string{"test-key": "test-value"},
+ },
+ }
+ _, err = client.Packer.PackerServiceUpdateBuild(updateBuildParams, nil)
+ if err, ok := err.(*packer_service.PackerServiceUpdateBuildDefault); ok {
+ t.Errorf("unexpected UpdateBuild error, expected nil. Got %v", err)
+ }
+}
+
+func upsertChannel(t *testing.T, bucketSlug, channelSlug, iterationID string) {
+ t.Helper()
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ createChParams := packer_service.NewPackerServiceCreateChannelParams()
+ createChParams.LocationOrganizationID = loc.OrganizationID
+ createChParams.LocationProjectID = loc.ProjectID
+ createChParams.BucketSlug = bucketSlug
+ createChParams.Body = packer_service.PackerServiceCreateChannelBody{
+ Slug: channelSlug,
+ IterationID: iterationID,
+ }
+
+ _, err := client.Packer.PackerServiceCreateChannel(createChParams, nil)
+ if err == nil {
+ return
+ }
+ if err, ok := err.(*packer_service.PackerServiceCreateChannelDefault); ok {
+ switch err.Code() {
+ case int(codes.AlreadyExists), http.StatusConflict:
+			// all good here!
+ updateChannelAssignment(t, bucketSlug, channelSlug, &models.HashicorpCloudPackerIteration{ID: iterationID})
+ return
+ }
+ }
+ t.Errorf("unexpected CreateChannel error, expected nil. Got %v", err)
+}
+
+func updateChannelAssignment(t *testing.T, bucketSlug string, channelSlug string, iteration *models.HashicorpCloudPackerIteration) {
+ t.Helper()
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ params := packer_service.NewPackerServiceUpdateChannelParams()
+ params.LocationOrganizationID = loc.OrganizationID
+ params.LocationProjectID = loc.ProjectID
+ params.BucketSlug = bucketSlug
+ params.Slug = channelSlug
+
+ if iteration != nil {
+ switch {
+ case iteration.ID != "":
+ params.Body.IterationID = iteration.ID
+ case iteration.Fingerprint != "":
+ params.Body.Fingerprint = iteration.Fingerprint
+ case iteration.IncrementalVersion > 0:
+ params.Body.IncrementalVersion = iteration.IncrementalVersion
+ }
+ }
+
+ _, err := client.Packer.PackerServiceUpdateChannel(params, nil)
+ if err == nil {
+ return
+ }
+ t.Errorf("unexpected UpdateChannel error, expected nil. Got %v", err)
+}
+
+func deleteBucket(t *testing.T, bucketSlug string, logOnError bool) {
+ t.Helper()
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ deleteBktParams := packer_service.NewPackerServiceDeleteBucketParams()
+ deleteBktParams.LocationOrganizationID = loc.OrganizationID
+ deleteBktParams.LocationProjectID = loc.ProjectID
+ deleteBktParams.BucketSlug = bucketSlug
+
+ _, err := client.Packer.PackerServiceDeleteBucket(deleteBktParams, nil)
+ if err == nil {
+ return
+ }
+ if logOnError {
+ t.Logf("unexpected DeleteBucket error, expected nil. Got %v", err)
+ }
+}
+
+func deleteIteration(t *testing.T, bucketSlug string, iterationFingerprint string, logOnError bool) {
+ t.Helper()
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ iterationID, err := getIterationIDFromFingerPrint(t, bucketSlug, iterationFingerprint)
+ if err != nil {
+ if logOnError {
+			t.Log(err.Error())
+ }
+ return
+ }
+
+ deleteItParams := packer_service.NewPackerServiceDeleteIterationParams()
+ deleteItParams.LocationOrganizationID = loc.OrganizationID
+ deleteItParams.LocationProjectID = loc.ProjectID
+ deleteItParams.BucketSlug = &bucketSlug
+ deleteItParams.IterationID = iterationID
+
+ _, err = client.Packer.PackerServiceDeleteIteration(deleteItParams, nil)
+ if err == nil {
+ return
+ }
+ if logOnError {
+ t.Logf("unexpected DeleteIteration error, expected nil. Got %v", err)
+ }
+}
+
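+// deleteChannel deletes the channel, logging any error (without failing the test)
+// when logOnError is set.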
+func deleteChannel(t *testing.T, bucketSlug string, channelSlug string, logOnError bool) {
+ t.Helper()
+
+ client := testAccProvider.Meta().(*clients.Client)
+ loc := &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }
+
+ deleteChParams := packer_service.NewPackerServiceDeleteChannelParams()
+ deleteChParams.LocationOrganizationID = loc.OrganizationID
+ deleteChParams.LocationProjectID = loc.ProjectID
+ deleteChParams.BucketSlug = bucketSlug
+ deleteChParams.Slug = channelSlug
+
+ _, err := client.Packer.PackerServiceDeleteChannel(deleteChParams, nil)
+ if err == nil {
+ return
+ }
+ if logOnError {
+ t.Logf("unexpected DeleteChannel error, expected nil. Got %v", err)
+ }
+}
diff --git a/internal/provider/test_helpers_test.go b/internal/provider/test_helpers_test.go
new file mode 100644
index 000000000..99ca28a6b
--- /dev/null
+++ b/internal/provider/test_helpers_test.go
@@ -0,0 +1,158 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package provider
+
+import (
+ "fmt"
+ "strings"
+ "time"
+
+ sharedmodels "github.com/hashicorp/hcp-sdk-go/clients/cloud-shared/v1/models"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
+ "github.com/hashicorp/terraform-provider-hcp/internal/clients"
+)
+
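+// testAccCheckFullURL returns a check that verifies the named resource's attribute
+// is an "https://" URL and, when port is non-empty, that it ends with that port.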
+func testAccCheckFullURL(name, key, port string) resource.TestCheckFunc {
+ return func(s *terraform.State) error {
+ rs, ok := s.RootModule().Resources[name]
+ if !ok {
+ return fmt.Errorf("not found: %s", name)
+ }
+
+ ep := rs.Primary.Attributes[key]
+
+ if !strings.HasPrefix(ep, "https://") {
+ return fmt.Errorf("URL missing scheme")
+ }
+
+ if port != "" {
+ if !strings.HasSuffix(ep, fmt.Sprintf(":%s", port)) {
+ return fmt.Errorf("URL missing port")
+ }
+ }
+
+ return nil
+ }
+}
+
+// If the resource is not found, the value will be nil and an error is returned.
+// If the attribute is not found, the value will be a blank string, but an error will still be returned.
+func testAccGetAttributeFromResourceInState(resourceName string, attribute string, state *terraform.State) (*string, error) {
+ resources := state.RootModule().Resources
+
+ resource, ok := resources[resourceName]
+ if !ok {
+ return nil, fmt.Errorf("Resource %q not found in the present state", resourceName)
+ }
+
+ value, ok := resource.Primary.Attributes[attribute]
+ if !ok {
+ return &value, fmt.Errorf("Resource %q does not have an attribute named %q in the present state", resourceName, attribute)
+ }
+
+ return &value, nil
+}
+
+// Returns a best-effort location from the state of a given resource.
+// Will return the default location even if the resource isn't found.
+func testAccGetLocationFromState(resourceName string, state *terraform.State) (*sharedmodels.HashicorpCloudLocationLocation, error) {
+
+ client := testAccProvider.Meta().(*clients.Client)
+
+ projectIDFromState, _ := testAccGetAttributeFromResourceInState(resourceName, "project_id", state)
+ if projectIDFromState == nil {
+ return &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: client.Config.ProjectID,
+ }, fmt.Errorf("Resource %q not found in the present state", resourceName)
+ }
+
+ projectID, _ := GetProjectID(*projectIDFromState, client.Config.OrganizationID)
+
+ return &sharedmodels.HashicorpCloudLocationLocation{
+ OrganizationID: client.Config.OrganizationID,
+ ProjectID: projectID,
+ }, nil
+}
+
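+// testAccCreateSlug appends a "-YYMMDDHHMM" timestamp suffix to the test name,
+// truncating the name so the resulting slug stays within 36 characters.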
+func testAccCreateSlug(testName string) string {
+ suffix := fmt.Sprintf("-%s", time.Now().Format("0601021504"))
+ return fmt.Sprintf("%.*s%s", 36-len(suffix), testName, suffix)
+}
+
+// TODO: Add support for blocks
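+// testAccConfigBuilderInterface describes a resource or data source block that can
+// be rendered into acceptance test configuration.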
+type testAccConfigBuilderInterface interface {
+ IsData() bool
+ ResourceType() string
+ UniqueName() string
+ ResourceName() string
+ AttributeRef(string) string
+ Attributes() map[string]string
+}
+
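+// testAccConfigBuildersToString renders each builder as an HCL resource or data
+// block and concatenates them into a single configuration string. Attributes with
+// an empty key or value are skipped.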
+func testAccConfigBuildersToString(builders ...testAccConfigBuilderInterface) string {
+ config := ""
+
+ for _, cb := range builders {
+ rOrD := "resource"
+ if cb.IsData() {
+ rOrD = "data"
+ }
+
+ attributesString := ""
+ for key, value := range cb.Attributes() {
+ if key != "" && value != "" {
+ attributesString += fmt.Sprintf(" %s = %s\n", key, value)
+ }
+ }
+
+ config += fmt.Sprintf(`
+%s %q %q {
+%s
+}
+`, rOrD, cb.ResourceType(), cb.UniqueName(), attributesString)
+ }
+ return config
+}
+
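+// testAccConfigBuilder is a basic implementation of testAccConfigBuilderInterface
+// backed by a map of attribute strings.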
+type testAccConfigBuilder struct {
+ isData bool
+ resourceType string
+ uniqueName string
+	// Attribute values must be as they would be in the config file.
+	// Ex: "value" can be represented in Go with `"value"` or fmt.Sprintf("%q", "value").
+	// An empty string is equivalent to the attribute not being present in the map.
+	attributes map[string]string
+}
+
+var _ testAccConfigBuilderInterface = testAccConfigBuilder{}
+
+func (b testAccConfigBuilder) IsData() bool {
+ return b.isData
+}
+
+func (b testAccConfigBuilder) ResourceType() string {
+ return b.resourceType
+}
+
+func (b testAccConfigBuilder) UniqueName() string {
+ return b.uniqueName
+}
+
+func (b testAccConfigBuilder) ResourceName() string {
+ if b.isData {
+ return fmt.Sprintf("data.%s.%s", b.ResourceType(), b.UniqueName())
+ }
+
+ return fmt.Sprintf("%s.%s", b.ResourceType(), b.UniqueName())
+}
+
+func (b testAccConfigBuilder) Attributes() map[string]string {
+ return b.attributes
+}
+
+func (b testAccConfigBuilder) AttributeRef(path string) string {
+ return fmt.Sprintf("%s.%s", b.ResourceName(), path)
+}
diff --git a/templates/data-sources/packer_image_iteration.md.tmpl b/templates/data-sources/packer_image_iteration.md.tmpl
index 973c7c2f0..e34c125c2 100644
--- a/templates/data-sources/packer_image_iteration.md.tmpl
+++ b/templates/data-sources/packer_image_iteration.md.tmpl
@@ -17,7 +17,7 @@ description: |-
### Required
-- `bucket_name` (String) The slug of the HCP Packer Registry image bucket to pull from.
+- `bucket_name` (String) The slug of the HCP Packer Registry bucket to pull from.
- `channel` (String) The channel that points to the version of the image you want.
### Optional
diff --git a/templates/guides/packer-channel-management.md.tmpl b/templates/guides/packer-channel-management.md.tmpl
new file mode 100644
index 000000000..29d88a037
--- /dev/null
+++ b/templates/guides/packer-channel-management.md.tmpl
@@ -0,0 +1,19 @@
+---
+subcategory: ""
+page_title: "Advanced Packer Channel Management - HCP Provider"
+description: |-
+  A guide to integrating HCP Packer resources and data sources for more advanced channel management.
+---
+
+# Advanced Packer Channel Management
+
+You can integrate multiple HCP Packer resources and data sources to perform advanced channel management tasks.
+
+## Setting the channel assignment on a Terraform-managed channel
+
+{{ tffile "examples/guides/packer_channel_management/main.tf" }}
+
+## Setting the channel assignment to the latest complete iteration
+
+{{ tffile "examples/guides/packer_channel_management/assign_latest.tf" }}
+
diff --git a/templates/resources/packer_channel.md.tmpl b/templates/resources/packer_channel.md.tmpl
deleted file mode 100644
index c94d53126..000000000
--- a/templates/resources/packer_channel.md.tmpl
+++ /dev/null
@@ -1,30 +0,0 @@
----
-page_title: "{{.Type}} {{.Name}} - {{.ProviderName}}"
-subcategory: ""
-description: |-
-{{ .Description | plainmarkdown | trimspace | prefixlines " " }}
----
-
-# {{.Name}} ({{.Type}})
-
-{{ .Description | trimspace }}
-
-## Example Usage
-
-To create a channel.
-{{ tffile "examples/resources/hcp_packer_channel/resource.tf" }}
-
-To create a channel with iteration assignment managed by Terraform.
-{{ tffile "examples/resources/hcp_packer_channel/resource_assignment.tf" }}
-
-Using the latest channel to create a new channel with the latest complete iteration assigned.
-{{ tffile "examples/resources/hcp_packer_channel/resource_using_latest_channel.tf" }}
-
-
-{{ .SchemaMarkdown | trimspace }}
-
-## Import
-
-Import is supported using the following syntax:
-
-{{ codefile "shell" "examples/resources/hcp_packer_channel/import.sh" }}