diff --git a/.copywrite.hcl b/.copywrite.hcl index 571d04dc960..1411fc6533f 100644 --- a/.copywrite.hcl +++ b/.copywrite.hcl @@ -2,7 +2,7 @@ schema_version = 1 project { license = "MPL-2.0" - copyright_year = 2020 + copyright_year = 2023 header_ignore = [ ".github/**", diff --git a/.github/workflows/actionlint.yml b/.github/workflows/actionlint.yml index ff718bd6e7e..dca8df38af4 100644 --- a/.github/workflows/actionlint.yml +++ b/.github/workflows/actionlint.yml @@ -13,6 +13,6 @@ jobs: steps: - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - name: Check workflow files - uses: docker://docker.mirror.hashicorp.services/rhysd/actionlint@sha256:3f24bf9d72ca67af6f9f8f3cc63b0e24621b57bf421cecfc452c3312e32b68cc # 1.6.24 + uses: docker://docker.mirror.hashicorp.services/rhysd/actionlint@sha256:02ccb6d91e4cb4a7b21eb99d5274d257e81ae667688d730e89d7ea0d6d35db91 with: args: -color -ignore SC2129 -ignore "'property \"download-path\" is not defined in object type'" diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f37b8cad6d0..649b8d4fb63 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,13 +1,9 @@ name: build on: - push: - branches: - - '**' - tags-ignore: - - '**' - workflow_call: - workflow_dispatch: + - workflow_dispatch + - push + - workflow_call env: PKG_NAME: "boundary" @@ -403,6 +399,7 @@ jobs: needs: - set-product-version - product-metadata + - build-linux - build-docker uses: ./.github/workflows/enos-run.yml with: diff --git a/.github/workflows/check-legacy-links-format.yml b/.github/workflows/check-legacy-links-format.yml new file mode 100644 index 00000000000..41787747fb0 --- /dev/null +++ b/.github/workflows/check-legacy-links-format.yml @@ -0,0 +1,22 @@ +name: Legacy Link Format Checker + +on: + push: + paths: + - "website/content/**/*.mdx" + - "website/data/*-nav-data.json" + - ".github/workflows/check-legacy-links-format.yml" + +permissions: + contents: read + +jobs: + check-links: + if: github.repository == 'hashicorp/boundary' + uses: hashicorp/dev-portal/.github/workflows/docs-content-check-legacy-links-format.yml@475289345d312552b745224b46895f51cc5fc490 + with: + repo-owner: "hashicorp" + repo-name: "boundary" + commit-sha: ${{ github.sha }} + mdx-directory: "website/content" + nav-data-directory: "website/data" diff --git a/.github/workflows/enos-run.yml b/.github/workflows/enos-run.yml index 8272ec4edb9..5494740cc35 100644 --- a/.github/workflows/enos-run.yml +++ b/.github/workflows/enos-run.yml @@ -78,9 +78,10 @@ jobs: include: - filter: 'e2e_aws builder:crt' - filter: 'e2e_database' - - filter: 'e2e_docker_base builder:crt' - - filter: 'e2e_docker_base_with_vault builder:crt' - runs-on: ${{ fromJSON(vars.RUNNER_LARGE) }} + - filter: 'e2e_static builder:crt' + - filter: 'e2e_static_with_vault builder:crt' + # - filter: 'e2e_ui builder:crt' # Don't run UI tests yet. takes too long. 
+ runs-on: ${{ fromJSON(vars.RUNNER) }} env: GITHUB_TOKEN: ${{ secrets.SERVICE_USER_GITHUB_TOKEN }} # Scenario variables @@ -181,21 +182,14 @@ jobs: run: | wget https://releases.hashicorp.com/vault/1.12.2/vault_1.12.2_linux_amd64.zip -O /tmp/test-deps/vault.zip - name: Install Vault CLI - if: matrix.filter == 'e2e_aws_base_with_vault builder:crt' || matrix.filter == 'e2e_database' || matrix.filter == 'e2e_ui builder:crt' || matrix.filter == 'e2e_docker_base_with_vault builder:crt' + if: matrix.filter == 'e2e_static_with_vault builder:crt' || matrix.filter == 'e2e_database' || matrix.filter == 'e2e_ui builder:crt' || matrix.filter == 'e2e_docker builder:crt' run: | unzip /tmp/test-deps/vault.zip -d /usr/local/bin - name: Add hosts to /etc/hosts # This enables the use of `boundary connect` with docker containers - if: contains(matrix.filter, 'e2e_docker') + if: matrix.filter == 'e2e_docker builder:crt' run: | sudo echo "127.0.0.1 localhost boundary" | sudo tee -a /etc/hosts - - name: GH fix for localhost resolution - if: github.repository == 'hashicorp/boundary' && contains(matrix.filter, 'e2e_docker') - run: | - cat /etc/hosts && echo "-----------" - sudo sed -i 's/::1 *localhost ip6-localhost ip6-loopback/::1 ip6 -localhost ip6-loopback/g' /etc/hosts - cat /etc/hosts - ssh -V - name: Download Boundary Linux AMD64 bundle id: download uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 @@ -207,14 +201,14 @@ jobs: unzip ${{steps.download.outputs.download-path}}/*.zip -d enos/support/boundary mv ${{steps.download.outputs.download-path}}/*.zip enos/support/boundary.zip - name: Download Boundary Linux AMD64 docker image - if: contains(matrix.filter, 'e2e_docker') + if: matrix.filter == 'e2e_docker builder:crt' uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 id: download-docker with: name: ${{ inputs.docker-image-file }} path: ./enos/support/downloads - name: Rename docker image file - if: contains(matrix.filter, 'e2e_docker') + if: matrix.filter == 'e2e_docker builder:crt' run: | mv ${{ steps.download-docker.outputs.download-path }}/*.tar enos/support/boundary_docker_image.tar - name: Set up Node.js diff --git a/.github/workflows/make-gen-delta.yml b/.github/workflows/make-gen-delta.yml index 457a812c3d6..6aba62468f6 100644 --- a/.github/workflows/make-gen-delta.yml +++ b/.github/workflows/make-gen-delta.yml @@ -9,7 +9,7 @@ permissions: jobs: make-gen-delta: - name: "Check for uncommitted changes from make gen" + name: "Check for uncommited changes from make gen" runs-on: ${{ fromJSON(vars.RUNNER) }} steps: - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 @@ -26,9 +26,6 @@ jobs: uses: actions/setup-go@4d34df0c2316fe8122ab82dc22947d607c0c91f9 # v4.0.0 with: go-version: "${{ steps.get-go-version.outputs.go-version }}" - - name: Running go mod tidy - run: | - go mod tidy - name: Install Dependencies run: | make tools diff --git a/.release/linux/package/etc/boundary.d/boundary.hcl b/.release/linux/package/etc/boundary.d/boundary.hcl index bef65730484..b5d0766c9b6 100644 --- a/.release/linux/package/etc/boundary.d/boundary.hcl +++ b/.release/linux/package/etc/boundary.d/boundary.hcl @@ -1,71 +1,3 @@ -# # Note that this is an example config file and is not intended to be functional as-is. 
-# # Full configuration options can be found at https://www.boundaryproject.io/docs/configuration/controller +# # Full configuration options can be found at https://www.boundaryproject.io/docs/configuration -# # Disable memory lock: https://www.man7.org/linux/man-pages/man2/mlock.2.html # disable_mlock = true - -# # Controller configuration block -# controller { -# # This name attr must be unique across all controller instances if running in HA mode -# name = "demo-controller-1" -# description = "A controller for a demo!" - -# # Database URL for postgres. This can be a direct "postgres://" -# # URL, or it can be "file://" to read the contents of a file to -# # supply the url, or "env://" to name an environment variable -# # that contains the URL. -# database { -# url = "postgresql://boundary:boundarydemo@postgres.yourdomain.com:5432/boundary" -# } -# } - -# # API listener configuration block -# listener "tcp" { -# # Should be the address of the NIC that the controller server will be reached on -# address = "10.0.0.1" -# # The purpose of this listener block -# purpose = "api" - -# tls_disable = false - -# # Uncomment to enable CORS for the Admin UI. Be sure to set the allowed origin(s) -# # to appropriate values. -# #cors_enabled = true -# #cors_allowed_origins = ["https://yourcorp.yourdomain.com", "serve://boundary"] -# } - -# # Data-plane listener configuration block (used for worker coordination) -# listener "tcp" { -# # Should be the IP of the NIC that the worker will connect on -# address = "10.0.0.1" -# # The purpose of this listener -# purpose = "cluster" -# } - -# # Root KMS configuration block: this is the root key for Boundary -# # Use a production KMS such as AWS KMS in production installs -# kms "aead" { -# purpose = "root" -# aead_type = "aes-gcm" -# key = "sP1fnF5Xz85RrXyELHFeZg9Ad2qt4Z4bgNHVGtD6ung=" -# key_id = "global_root" -# } - -# # Worker authorization KMS -# # Use a production KMS such as AWS KMS for production installs -# # This key is the same key used in the worker configuration -# kms "aead" { -# purpose = "worker-auth" -# aead_type = "aes-gcm" -# key = "8fZBjCUfN0TzjEGLQldGY4+iE9AkOvCfjh7+p0GtRBQ=" -# key_id = "global_worker-auth" -# } - -# # Recovery KMS block: configures the recovery key for Boundary -# # Use a production KMS such as AWS KMS for production installs -# kms "aead" { -# purpose = "recovery" -# aead_type = "aes-gcm" -# key = "8fZBjCUfN0TzjEGLQldGY4+iE9AkOvCfjh7+p0GtRBQ=" -# key_id = "global_recovery" -# } diff --git a/.release/linux/package/etc/boundary.d/controller.hcl b/.release/linux/package/etc/boundary.d/controller.hcl new file mode 100644 index 00000000000..1b7b57a0f8f --- /dev/null +++ b/.release/linux/package/etc/boundary.d/controller.hcl @@ -0,0 +1,71 @@ +# # Note that this is an example systemd file and is not intended to be functional as-is. +# # Full configuration options can be found at https://www.boundaryproject.io/docs/configuration/controller + +# # Disable memory lock: https://www.man7.org/linux/man-pages/man2/mlock.2.html +# # disable_mlock = true + +# # Controller configuration block +# controller { +# # This name attr must be unique across all controller instances if running in HA mode +# name = "demo-controller-1" +# description = "A controller for a demo!" + +# # Database URL for postgres. This can be a direct "postgres://" +# # URL, or it can be "file://" to read the contents of a file to +# # supply the url, or "env://" to name an environment variable +# # that contains the URL. 
+# database { +# url = "postgresql://boundary:boundarydemo@postgres.yourdomain.com:5432/boundary" +# } +# } + +# # API listener configuration block +# listener "tcp" { +# # Should be the address of the NIC that the controller server will be reached on +# address = "10.0.0.1" +# # The purpose of this listener block +# purpose = "api" + +# tls_disable = false + +# # Uncomment to enable CORS for the Admin UI. Be sure to set the allowed origin(s) +# # to appropriate values. +# #cors_enabled = true +# #cors_allowed_origins = ["https://yourcorp.yourdomain.com", "serve://boundary"] +# } + +# # Data-plane listener configuration block (used for worker coordination) +# listener "tcp" { +# # Should be the IP of the NIC that the worker will connect on +# address = "10.0.0.1" +# # The purpose of this listener +# purpose = "cluster" +# } + +# # Root KMS configuration block: this is the root key for Boundary +# # Use a production KMS such as AWS KMS in production installs +# kms "aead" { +# purpose = "root" +# aead_type = "aes-gcm" +# key = "sP1fnF5Xz85RrXyELHFeZg9Ad2qt4Z4bgNHVGtD6ung=" +# key_id = "global_root" +# } + +# # Worker authorization KMS +# # Use a production KMS such as AWS KMS for production installs +# # This key is the same key used in the worker configuration +# kms "aead" { +# purpose = "worker-auth" +# aead_type = "aes-gcm" +# key = "8fZBjCUfN0TzjEGLQldGY4+iE9AkOvCfjh7+p0GtRBQ=" +# key_id = "global_worker-auth" +# } + +# # Recovery KMS block: configures the recovery key for Boundary +# # Use a production KMS such as AWS KMS for production installs +# kms "aead" { +# purpose = "recovery" +# aead_type = "aes-gcm" +# key = "8fZBjCUfN0TzjEGLQldGY4+iE9AkOvCfjh7+p0GtRBQ=" +# key_id = "global_recovery" +# } diff --git a/.release/linux/package/etc/boundary.d/worker.hcl b/.release/linux/package/etc/boundary.d/worker.hcl index 5ac07e935af..0c095931364 100644 --- a/.release/linux/package/etc/boundary.d/worker.hcl +++ b/.release/linux/package/etc/boundary.d/worker.hcl @@ -1,4 +1,4 @@ -# # Note that this is an example config file and is not intended to be functional as-is. +# # Note that this is an example systemd file and is not intended to be functional as-is. # # Full configuration options can be found at https://www.boundaryproject.io/docs/configuration/worker # listener "tcp" { diff --git a/.release/linux/package/usr/lib/systemd/system/boundary.service b/.release/linux/package/usr/lib/systemd/system/boundary.service index 617f5617962..9682b79afc7 100644 --- a/.release/linux/package/usr/lib/systemd/system/boundary.service +++ b/.release/linux/package/usr/lib/systemd/system/boundary.service @@ -10,7 +10,7 @@ User=boundary Group=boundary ProtectSystem=full ProtectHome=read-only -ExecStart=/usr/bin/boundary server -config=/etc/boundary.d/boundary.hcl +ExecStart=/usr/bin/boundary server -config=/etc/boundary.d/%i.hcl ExecReload=/bin/kill --signal HUP $MAINPID KillMode=process KillSignal=SIGINT @@ -20,4 +20,4 @@ TimeoutStopSec=30 LimitMEMLOCK=infinity [Install] -WantedBy=multi-user.target +WantedBy=multi-user.target \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 44512be51f6..5e695f65c07 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,60 +2,10 @@ Canonical reference for changes, improvements, and bugfixes for Boundary. -## 0.13.1 (2023/07/10) +## Next ### New and Improved -* roles: In grants, the `id` field has been changed to `ids` (but `id` will - still be accepted for now, up until 0.15.0). 
In the `ids` field, multiple IDs - can now be specified in a grant, either via commas (text format) or array - (JSON format). ([PR](https://github.com/hashicorp/boundary/pull/3263)). -* dev environment: When running `boundary dev` the initial LDAP auth-method with an - ID of `amldap_1234567890` is now in a public-active state, so it will be returned - in the response from `boundary auth-methods list` - -### Deprecations/Changes - -* Grants can now accept more than one ID per grant string (or entry in JSON) via - the `ids` parameter. In 0.15.0 the ability to add new grants via the `id` - parameter will be removed. - -### Bug Fixes - -* PKI worker authentication: A worker authentication record can be stored more than once, if it matches the - existing record for that worker auth key ID. Fixes an edge case where a worker attempted authorization - and the controller successfully stored the worker auth record but went down before returning authorization - details to the worker. ([PR](https://github.com/hashicorp/boundary/pull/3389)) -* LDAP managed groups: adding/setting/removing a principal to a role now works - properly when it's an LDAP managed group. - ([PR](https://github.com/hashicorp/boundary/pull/3361) and - [PR](https://github.com/hashicorp/boundary/pull/3363)) - -## 0.13.0 (2023/06/13) - -### New and Improved - -* SSH Session Recordings (Enterprise and HCP Boundary only): SSH targets can now - be configured to record sessions. Recordings are signed and stored in a - Storage Bucket. Recordings can be played back in the admin UI. - * Storage Buckets: This release introduces Storage Buckets, a Boundary - resource that represents a bucket in an external object store. Storage - Buckets can be defined at the global or org scope. When associated with an - SSH target, the storage bucket is used to store session recordings. This - release includes support for AWS S3 only. - * BSR (Boundary Session Recording) file format: BSR is a new specification - that defines a hierarchical directory structure of files and a binary file - format. The contents of a BSR include all data transmitted between a user - and a target during a single session, relevant session metadata and summary - information. The BSR also includes checksum and signature files for - cryptographically verifying BSR contents, and a set of KMS wrapped keys for - use in BSR verification. The BSR format is intended to be extensible to - support various protocols. With this release BSR supports the SSH protocol. - It also supports converting an SSH channel recording into an - [asciicast](https://github.com/asciinema/asciinema/blob/develop/doc/asciicast-v2.md) - format that is playable by asciinema. - * To learn more about this new feature, refer to the - [documentation](http://developer.hashicorp.com/boundary/docs/configuration/session-recording). * KMS workers: KMS workers now have feature parity with PKI workers (they support multi-hop and Vault private access) and support separate KMSes for authenticating downstreams across different networks. See the [worker @@ -75,9 +25,7 @@ Canonical reference for changes, improvements, and bugfixes for Boundary. 
([PR](https://github.com/hashicorp/boundary/pull/2912)) * ui: Display external names when listing dynamic hosts ([PR](https://github.com/hashicorp/boundary-ui/pull/1664)) * ui: Add support for LDAP authentication ([PR](https://github.com/hashicorp/boundary-ui/pull/1645)) -* Dynamic Host Catalog: You can now view the AWS or Azure host name when listing hosts in CLI, - admin console, and desktop client. ([PR](https://github.com/hashicorp/boundary/pull/3074)) -* Add configuration for license reporting (Enterprise only) +* Dynamic Host Catalog: You can now view the AWS or Azure host name when listing hosts in CLI, admin console, and desktop client. ([PR](https://github.com/hashicorp/boundary/pull/3074)) ### Deprecations/Changes @@ -104,13 +52,13 @@ Canonical reference for changes, improvements, and bugfixes for Boundary. incorrectly being generated for auth token resources, which do not support versioning. This is technically a breaking change, but it was a no-op option anyways that there was no reason to be using. It has now been removed. -* Plugins: With the introduction of the storage plugin service, the Azure and AWS Host plugin +* Plugins: With the introduction of new plugin services, the Azure and AWS Host plugin repositories have been renamed to drop the `host` element of the repository name: - https://github.com/hashicorp/boundary-plugin-host-aws -> https://github.com/hashicorp/boundary-plugin-aws - https://github.com/hashicorp/boundary-plugin-host-azure -> https://github.com/hashicorp/boundary-plugin-azure - Similarly the `plugins/host` package has been renamed to `plugins/boundary` + similarly the `plugins/host` package has been renamed to `plugins/boundary` ([PR1](https://github.com/hashicorp/boundary/pull/3262), [PR2](https://github.com/hashicorp/boundary-plugin-aws/pull/24), [PR3](https://github.com/hashicorp/boundary-plugin-azure/pull/12), diff --git a/api/authtokens/authtokens.gen.go b/api/authtokens/authtokens.gen.go index d0fee9dbdf1..f95ea76f610 100644 --- a/api/authtokens/authtokens.gen.go +++ b/api/authtokens/authtokens.gen.go @@ -44,6 +44,7 @@ func (n AuthTokenReadResult) GetResponse() *api.Response { return n.response } +type AuthTokenCreateResult = AuthTokenReadResult type AuthTokenUpdateResult = AuthTokenReadResult type AuthTokenDeleteResult struct { diff --git a/api/credentiallibraries/vault_credential_library_attributes.gen.go b/api/credentiallibraries/vault_credential_library_attributes.gen.go index 1d6ae4a5842..916f3d596bf 100644 --- a/api/credentiallibraries/vault_credential_library_attributes.gen.go +++ b/api/credentiallibraries/vault_credential_library_attributes.gen.go @@ -35,8 +35,8 @@ func AttributesMapToVaultCredentialLibraryAttributes(in map[string]interface{}) } func (pt *CredentialLibrary) GetVaultCredentialLibraryAttributes() (*VaultCredentialLibraryAttributes, error) { - if pt.Type != "vault-generic" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential-library is of type %s", "vault-generic", pt.Type) + if pt.Type != "vault" { + return nil, fmt.Errorf("asked to fetch %s-type attributes but credential-library is of type %s", "vault", pt.Type) } return AttributesMapToVaultCredentialLibraryAttributes(pt.Attributes) } diff --git a/api/credentiallibraries/vault_ssh_certificate_credential_library_attributes.gen.go b/api/credentiallibraries/vault_ssh_certificate_credential_library_attributes.gen.go index 6316ec158a2..784d025977d 100644 --- a/api/credentiallibraries/vault_ssh_certificate_credential_library_attributes.gen.go +++ 
b/api/credentiallibraries/vault_ssh_certificate_credential_library_attributes.gen.go @@ -40,8 +40,8 @@ func AttributesMapToVaultSSHCertificateCredentialLibraryAttributes(in map[string } func (pt *CredentialLibrary) GetVaultSSHCertificateCredentialLibraryAttributes() (*VaultSSHCertificateCredentialLibraryAttributes, error) { - if pt.Type != "vault-ssh-certificate" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential-library is of type %s", "vault-ssh-certificate", pt.Type) + if pt.Type != "vaultsshcertificate" { + return nil, fmt.Errorf("asked to fetch %s-type attributes but credential-library is of type %s", "vaultsshcertificate", pt.Type) } return AttributesMapToVaultSSHCertificateCredentialLibraryAttributes(pt.Attributes) } diff --git a/api/credentials/ssh_private_key_attributes.gen.go b/api/credentials/ssh_private_key_attributes.gen.go index c473d75314c..26e8466a9ae 100644 --- a/api/credentials/ssh_private_key_attributes.gen.go +++ b/api/credentials/ssh_private_key_attributes.gen.go @@ -37,8 +37,8 @@ func AttributesMapToSshPrivateKeyAttributes(in map[string]interface{}) (*SshPriv } func (pt *Credential) GetSshPrivateKeyAttributes() (*SshPrivateKeyAttributes, error) { - if pt.Type != "ssh_private_key" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential is of type %s", "ssh_private_key", pt.Type) + if pt.Type != "sshprivatekey" { + return nil, fmt.Errorf("asked to fetch %s-type attributes but credential is of type %s", "sshprivatekey", pt.Type) } return AttributesMapToSshPrivateKeyAttributes(pt.Attributes) } diff --git a/api/credentials/username_password_attributes.gen.go b/api/credentials/username_password_attributes.gen.go index a79715bd545..0a3dd4cdd00 100644 --- a/api/credentials/username_password_attributes.gen.go +++ b/api/credentials/username_password_attributes.gen.go @@ -35,8 +35,8 @@ func AttributesMapToUsernamePasswordAttributes(in map[string]interface{}) (*User } func (pt *Credential) GetUsernamePasswordAttributes() (*UsernamePasswordAttributes, error) { - if pt.Type != "username_password" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential is of type %s", "username_password", pt.Type) + if pt.Type != "usernamepassword" { + return nil, fmt.Errorf("asked to fetch %s-type attributes but credential is of type %s", "usernamepassword", pt.Type) } return AttributesMapToUsernamePasswordAttributes(pt.Attributes) } diff --git a/api/roles/grant_json.gen.go b/api/roles/grant_json.gen.go index f2d4c069b00..6612a5cf10f 100644 --- a/api/roles/grant_json.gen.go +++ b/api/roles/grant_json.gen.go @@ -6,7 +6,6 @@ package roles type GrantJson struct { Id string `json:"id,omitempty"` - Ids []string `json:"ids,omitempty"` Type string `json:"type,omitempty"` Actions []string `json:"actions,omitempty"` } diff --git a/api/sessionrecordings/credential.gen.go b/api/sessionrecordings/credential.gen.go deleted file mode 100644 index 00571450f08..00000000000 --- a/api/sessionrecordings/credential.gen.go +++ /dev/null @@ -1,15 +0,0 @@ -// Code generated by "make api"; DO NOT EDIT. -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package sessionrecordings - -type Credential struct { - Id string `json:"id,omitempty"` - CredentialStore *CredentialStore `json:"credential_store,omitempty"` - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - Purposes []string `json:"purposes,omitempty"` - Type string `json:"type,omitempty"` - Attributes map[string]interface{} `json:"attributes,omitempty"` -} diff --git a/api/sessionrecordings/credential_library.gen.go b/api/sessionrecordings/credential_library.gen.go deleted file mode 100644 index f74db0ab6eb..00000000000 --- a/api/sessionrecordings/credential_library.gen.go +++ /dev/null @@ -1,15 +0,0 @@ -// Code generated by "make api"; DO NOT EDIT. -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package sessionrecordings - -type CredentialLibrary struct { - Id string `json:"id,omitempty"` - CredentialStore *CredentialStore `json:"credential_store,omitempty"` - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - Purposes []string `json:"purposes,omitempty"` - Type string `json:"type,omitempty"` - Attributes map[string]interface{} `json:"attributes,omitempty"` -} diff --git a/api/sessionrecordings/credential_store.gen.go b/api/sessionrecordings/credential_store.gen.go deleted file mode 100644 index 0c8ecfa9c11..00000000000 --- a/api/sessionrecordings/credential_store.gen.go +++ /dev/null @@ -1,14 +0,0 @@ -// Code generated by "make api"; DO NOT EDIT. -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package sessionrecordings - -type CredentialStore struct { - Id string `json:"id,omitempty"` - ScopeId string `json:"scope_id,omitempty"` - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - Type string `json:"type,omitempty"` - Attributes map[string]interface{} `json:"attributes,omitempty"` -} diff --git a/api/sessionrecordings/json_credential_attributes.gen.go b/api/sessionrecordings/json_credential_attributes.gen.go deleted file mode 100644 index d5e801b9ba4..00000000000 --- a/api/sessionrecordings/json_credential_attributes.gen.go +++ /dev/null @@ -1,40 +0,0 @@ -// Code generated by "make api"; DO NOT EDIT. -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package sessionrecordings - -import ( - "fmt" - - "github.com/mitchellh/mapstructure" -) - -type JsonCredentialAttributes struct { - ObjectHmac string `json:"object_hmac,omitempty"` -} - -func AttributesMapToJsonCredentialAttributes(in map[string]interface{}) (*JsonCredentialAttributes, error) { - if in == nil { - return nil, fmt.Errorf("nil input map") - } - var out JsonCredentialAttributes - dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - Result: &out, - TagName: "json", - }) - if err != nil { - return nil, fmt.Errorf("error creating mapstructure decoder: %w", err) - } - if err := dec.Decode(in); err != nil { - return nil, fmt.Errorf("error decoding: %w", err) - } - return &out, nil -} - -func (pt *Credential) GetJsonCredentialAttributes() (*JsonCredentialAttributes, error) { - if pt.Type != "json" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential is of type %s", "json", pt.Type) - } - return AttributesMapToJsonCredentialAttributes(pt.Attributes) -} diff --git a/api/sessionrecordings/ssh_private_key_credential_attributes.gen.go b/api/sessionrecordings/ssh_private_key_credential_attributes.gen.go deleted file mode 100644 index dd0293bfe7c..00000000000 --- a/api/sessionrecordings/ssh_private_key_credential_attributes.gen.go +++ /dev/null @@ -1,42 +0,0 @@ -// Code generated by "make api"; DO NOT EDIT. -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package sessionrecordings - -import ( - "fmt" - - "github.com/mitchellh/mapstructure" -) - -type SshPrivateKeyCredentialAttributes struct { - Username string `json:"username,omitempty"` - PrivateKeyHmac string `json:"private_key_hmac,omitempty"` - PrivateKeyPassphraseHmac string `json:"private_key_passphrase_hmac,omitempty"` -} - -func AttributesMapToSshPrivateKeyCredentialAttributes(in map[string]interface{}) (*SshPrivateKeyCredentialAttributes, error) { - if in == nil { - return nil, fmt.Errorf("nil input map") - } - var out SshPrivateKeyCredentialAttributes - dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - Result: &out, - TagName: "json", - }) - if err != nil { - return nil, fmt.Errorf("error creating mapstructure decoder: %w", err) - } - if err := dec.Decode(in); err != nil { - return nil, fmt.Errorf("error decoding: %w", err) - } - return &out, nil -} - -func (pt *Credential) GetSshPrivateKeyCredentialAttributes() (*SshPrivateKeyCredentialAttributes, error) { - if pt.Type != "ssh_private_key" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential is of type %s", "ssh_private_key", pt.Type) - } - return AttributesMapToSshPrivateKeyCredentialAttributes(pt.Attributes) -} diff --git a/api/sessionrecordings/username_password_credential_attributes.gen.go b/api/sessionrecordings/username_password_credential_attributes.gen.go deleted file mode 100644 index c9860c6bdc0..00000000000 --- a/api/sessionrecordings/username_password_credential_attributes.gen.go +++ /dev/null @@ -1,41 +0,0 @@ -// Code generated by "make api"; DO NOT EDIT. -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package sessionrecordings - -import ( - "fmt" - - "github.com/mitchellh/mapstructure" -) - -type UsernamePasswordCredentialAttributes struct { - Username string `json:"username,omitempty"` - PasswordHmac string `json:"password_hmac,omitempty"` -} - -func AttributesMapToUsernamePasswordCredentialAttributes(in map[string]interface{}) (*UsernamePasswordCredentialAttributes, error) { - if in == nil { - return nil, fmt.Errorf("nil input map") - } - var out UsernamePasswordCredentialAttributes - dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - Result: &out, - TagName: "json", - }) - if err != nil { - return nil, fmt.Errorf("error creating mapstructure decoder: %w", err) - } - if err := dec.Decode(in); err != nil { - return nil, fmt.Errorf("error decoding: %w", err) - } - return &out, nil -} - -func (pt *Credential) GetUsernamePasswordCredentialAttributes() (*UsernamePasswordCredentialAttributes, error) { - if pt.Type != "username_password" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential is of type %s", "username_password", pt.Type) - } - return AttributesMapToUsernamePasswordCredentialAttributes(pt.Attributes) -} diff --git a/api/sessionrecordings/values_at_time.gen.go b/api/sessionrecordings/values_at_time.gen.go index 7e8c0dbc5b1..d39bbe220f8 100644 --- a/api/sessionrecordings/values_at_time.gen.go +++ b/api/sessionrecordings/values_at_time.gen.go @@ -5,9 +5,7 @@ package sessionrecordings type ValuesAtTime struct { - User *User `json:"user,omitempty"` - Target *Target `json:"target,omitempty"` - Host *Host `json:"host,omitempty"` - Credentials []*Credential `json:"credentials,omitempty"` - CredentialLibraries []*CredentialLibrary `json:"credential_libraries,omitempty"` + User *User `json:"user,omitempty"` + Target *Target `json:"target,omitempty"` + Host *Host `json:"host,omitempty"` } diff --git a/api/sessionrecordings/vault_credential_library_attributes.gen.go b/api/sessionrecordings/vault_credential_library_attributes.gen.go deleted file mode 100644 index 0ff0a7016ca..00000000000 --- a/api/sessionrecordings/vault_credential_library_attributes.gen.go +++ /dev/null @@ -1,42 +0,0 @@ -// Code generated by "make api"; DO NOT EDIT. -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package sessionrecordings - -import ( - "fmt" - - "github.com/mitchellh/mapstructure" -) - -type VaultCredentialLibraryAttributes struct { - Path string `json:"path,omitempty"` - HttpMethod string `json:"http_method,omitempty"` - HttpRequestBody string `json:"http_request_body,omitempty"` -} - -func AttributesMapToVaultCredentialLibraryAttributes(in map[string]interface{}) (*VaultCredentialLibraryAttributes, error) { - if in == nil { - return nil, fmt.Errorf("nil input map") - } - var out VaultCredentialLibraryAttributes - dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - Result: &out, - TagName: "json", - }) - if err != nil { - return nil, fmt.Errorf("error creating mapstructure decoder: %w", err) - } - if err := dec.Decode(in); err != nil { - return nil, fmt.Errorf("error decoding: %w", err) - } - return &out, nil -} - -func (pt *CredentialLibrary) GetVaultCredentialLibraryAttributes() (*VaultCredentialLibraryAttributes, error) { - if pt.Type != "vault-generic" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential-library is of type %s", "vault-generic", pt.Type) - } - return AttributesMapToVaultCredentialLibraryAttributes(pt.Attributes) -} diff --git a/api/sessionrecordings/vault_credential_store_attributes.gen.go b/api/sessionrecordings/vault_credential_store_attributes.gen.go deleted file mode 100644 index d0a06a986f0..00000000000 --- a/api/sessionrecordings/vault_credential_store_attributes.gen.go +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated by "make api"; DO NOT EDIT. -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package sessionrecordings - -import ( - "fmt" - - "github.com/mitchellh/mapstructure" -) - -type VaultCredentialStoreAttributes struct { - Address string `json:"address,omitempty"` - Namespace string `json:"namespace,omitempty"` - TlsServerName string `json:"tls_server_name,omitempty"` - TlsSkipVerify bool `json:"tls_skip_verify,omitempty"` - WorkerFilter string `json:"worker_filter,omitempty"` -} - -func AttributesMapToVaultCredentialStoreAttributes(in map[string]interface{}) (*VaultCredentialStoreAttributes, error) { - if in == nil { - return nil, fmt.Errorf("nil input map") - } - var out VaultCredentialStoreAttributes - dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - Result: &out, - TagName: "json", - }) - if err != nil { - return nil, fmt.Errorf("error creating mapstructure decoder: %w", err) - } - if err := dec.Decode(in); err != nil { - return nil, fmt.Errorf("error decoding: %w", err) - } - return &out, nil -} - -func (pt *CredentialStore) GetVaultCredentialStoreAttributes() (*VaultCredentialStoreAttributes, error) { - if pt.Type != "vault" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential-store is of type %s", "vault", pt.Type) - } - return AttributesMapToVaultCredentialStoreAttributes(pt.Attributes) -} diff --git a/api/sessionrecordings/vault_ssh_certificate_credential_library_attributes.gen.go b/api/sessionrecordings/vault_ssh_certificate_credential_library_attributes.gen.go deleted file mode 100644 index 4b603488933..00000000000 --- a/api/sessionrecordings/vault_ssh_certificate_credential_library_attributes.gen.go +++ /dev/null @@ -1,46 +0,0 @@ -// Code generated by "make api"; DO NOT EDIT. -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package sessionrecordings - -import ( - "fmt" - - "github.com/mitchellh/mapstructure" -) - -type VaultSSHCertificateCredentialLibraryAttributes struct { - Path string `json:"path,omitempty"` - Username string `json:"username,omitempty"` - KeyType string `json:"key_type,omitempty"` - KeyBits uint32 `json:"key_bits,omitempty"` - Ttl string `json:"ttl,omitempty"` - CriticalOptions map[string]string `json:"critical_options,omitempty"` - Extensions map[string]string `json:"extensions,omitempty"` -} - -func AttributesMapToVaultSSHCertificateCredentialLibraryAttributes(in map[string]interface{}) (*VaultSSHCertificateCredentialLibraryAttributes, error) { - if in == nil { - return nil, fmt.Errorf("nil input map") - } - var out VaultSSHCertificateCredentialLibraryAttributes - dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - Result: &out, - TagName: "json", - }) - if err != nil { - return nil, fmt.Errorf("error creating mapstructure decoder: %w", err) - } - if err := dec.Decode(in); err != nil { - return nil, fmt.Errorf("error decoding: %w", err) - } - return &out, nil -} - -func (pt *CredentialLibrary) GetVaultSSHCertificateCredentialLibraryAttributes() (*VaultSSHCertificateCredentialLibraryAttributes, error) { - if pt.Type != "vault-ssh-certificate" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but credential-library is of type %s", "vault-ssh-certificate", pt.Type) - } - return AttributesMapToVaultSSHCertificateCredentialLibraryAttributes(pt.Attributes) -} diff --git a/enos/ci/service-user-iam/main.tf b/enos/ci/service-user-iam/main.tf index af87ab7e0fe..2453dab37e3 100644 --- a/enos/ci/service-user-iam/main.tf +++ b/enos/ci/service-user-iam/main.tf @@ -218,13 +218,9 @@ data "aws_iam_policy_document" "enos_policy_document" { "rds:ModifyDBSubnetGroup", "rds:RemoveTagsFromResource", "s3:ListAllMyBuckets", - "s3:CreateBucket*", - "s3:DeleteBucket*", - "s3:GetBucket*", - "s3:HeadBucket", - "s3:PutBucket*" + "s3:CreateBucket", + "s3:DeleteBucket", ] - resources = ["*"] } } @@ -232,7 +228,6 @@ data "aws_iam_policy_document" "enos_policy_document" { data "aws_iam_policy_document" "aws_nuke_policy_document" { provider = aws.us_east_1 - statement { effect = "Allow" actions = [ @@ -252,12 +247,9 @@ data "aws_iam_policy_document" "aws_nuke_policy_document" { "iam:ListUsers", "iam:UntagUser", "servicequotas:ListServiceQuotas", - "s3:Head*", - "s3:List*", - "s3:Get*", - "s3:Delete*" + "s3:ListAllMyBuckets", + "s3:DeleteBucket", ] - resources = ["*"] } } @@ -271,45 +263,23 @@ resource "aws_iam_policy" "demo_user" { data "aws_iam_policy_document" "demo_user_policy_document" { statement { - sid = "BoundaryHostPlugin" + sid = "DemoUserEC2Permissions" actions = [ "ec2:DescribeInstances*" ] - resources = ["*"] } - statement { - sid = "BoundarySessionS3OnlyMyAccount" - effect = "Allow" - actions = [ - "s3:DeleteObject", - "s3:GetObject", - "s3:GetObjectAttributes", - "s3:PutObject", - ] - - condition { - test = "StringEquals" - variable = "s3:ResourceAccount" - values = [data.aws_caller_identity.current.account_id] - } - - resources = ["*"] - } - - statement { - sid = "IAMAKRotate" + sid = "DemoUserIAMPermissions" actions = [ "iam:CreateAccessKey", "iam:DeleteAccessKey", "iam:ListAccessKeys", "iam:UpdateAccessKey" ] + resources = ["arn:aws:iam::147451547303:user/&{aws:username}"] - resources = ["arn:aws:iam::${data.aws_caller_identity.current.account_id}:user/&{aws:username}"] } - statement { sid = "ExplicitDeny" effect = "Deny" @@ -319,11 
+289,7 @@ data "aws_iam_policy_document" "demo_user_policy_document" { "iam:CreateAccessKey", "iam:DeleteAccessKey", "iam:ListAccessKeys", - "iam:UpdateAccessKey", - "s3:DeleteObject", - "s3:GetObject", - "s3:GetObjectAttributes", - "s3:PutObject", - ] + "iam:UpdateAccessKey" + ] } } diff --git a/enos/enos-modules.hcl b/enos/enos-modules.hcl index 517bbb3de4e..b6f472b8b8e 100644 --- a/enos/enos-modules.hcl +++ b/enos/enos-modules.hcl @@ -40,10 +40,6 @@ module "worker" { ssh_aws_keypair = var.aws_ssh_keypair_name } -module "bucket" { - source = "./modules/bucket" -} - module "build_crt" { source = "./modules/build_crt" } diff --git a/enos/enos-scenario-e2e-aws.hcl b/enos/enos-scenario-e2e-aws.hcl index 04ffb23e016..3b49d5f3cf1 100644 --- a/enos/enos-scenario-e2e-aws.hcl +++ b/enos/enos-scenario-e2e-aws.hcl @@ -195,7 +195,6 @@ scenario "e2e_aws" { controller_addresses = step.create_boundary_cluster.public_controller_addresses controller_sg_id = step.create_boundary_cluster.controller_aux_sg_id worker_type_tags = ["worker_e2e_test"] - config_file_path = "templates/worker.hcl" } } diff --git a/enos/enos-scenario-e2e-docker-base.hcl b/enos/enos-scenario-e2e-docker-base.hcl deleted file mode 100644 index dd3c44c5bf8..00000000000 --- a/enos/enos-scenario-e2e-docker-base.hcl +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (c) HashiCorp, Inc. -# SPDX-License-Identifier: MPL-2.0 - -# For this scenario to work, add the following line to /etc/hosts -# 127.0.0.1 localhost boundary - -scenario "e2e_docker_base" { - terraform_cli = terraform_cli.default - terraform = terraform.default - providers = [ - provider.enos.default - ] - - matrix { - builder = ["local", "crt"] - } - - locals { - aws_ssh_private_key_path = abspath(var.aws_ssh_private_key_path) - local_boundary_dir = abspath(var.local_boundary_dir) - boundary_docker_image_file = abspath(var.boundary_docker_image_file) - license_path = abspath(var.boundary_license_path != null ? var.boundary_license_path : joinpath(path.root, "./support/boundary.hclic")) - - tags = merge({ - "Project Name" : var.project_name - "Project" : "Enos", - "Environment" : "ci" - }, var.tags) - } - - step "build_boundary_docker_image" { - module = matrix.builder == "crt" ? module.build_boundary_docker_crt : module.build_boundary_docker_local - - variables { - path = matrix.builder == "crt" ? local.boundary_docker_image_file : "/tmp/boundary_docker_image.tar" - } - } - - step "create_docker_network" { - module = module.docker_network - } - - step "create_boundary_database" { - depends_on = [ - step.create_docker_network - ] - variables { - image_name = "${var.docker_mirror}/library/postgres:latest" - network_name = step.create_docker_network.network_name - } - module = module.docker_postgres - } - - step "read_license" { - skip_step = var.boundary_edition == "oss" - module = module.read_license - - variables { - file_name = local.license_path - } - } - - step "create_boundary" { - module = module.docker_boundary - depends_on = [ - step.create_docker_network, - step.create_boundary_database, - step.build_boundary_docker_image - ] - variables { - image_name = matrix.builder == "crt" ? var.boundary_docker_image_name : step.build_boundary_docker_image.image_name - network_name = step.create_docker_network.network_name - postgres_address = step.create_boundary_database.address - boundary_license = var.boundary_edition != "oss" ? 
step.read_license.license : "" - } - } - - step "create_host" { - module = module.docker_openssh_server - depends_on = [ - step.create_docker_network - ] - variables { - image_name = "${var.docker_mirror}/linuxserver/openssh-server:latest" - network_name = step.create_docker_network.network_name - private_key_file_path = local.aws_ssh_private_key_path - } - } - - step "run_e2e_test" { - module = module.test_e2e - depends_on = [ - step.create_boundary, - step.create_host, - step.create_boundary_database - ] - - variables { - test_package = "github.com/hashicorp/boundary/testing/internal/e2e/tests/base" - debug_no_run = var.e2e_debug_no_run - alb_boundary_api_addr = step.create_boundary.address - auth_method_id = step.create_boundary.auth_method_id - auth_login_name = step.create_boundary.login_name - auth_password = step.create_boundary.password - local_boundary_dir = local.local_boundary_dir - aws_ssh_private_key_path = local.aws_ssh_private_key_path - target_ip = step.create_host.address - target_port = step.create_host.port - target_user = "ubuntu" - } - } - - output "test_results" { - value = step.run_e2e_test.test_results - } -} diff --git a/enos/enos-scenario-e2e-docker-base-with-vault.hcl b/enos/enos-scenario-e2e-docker.hcl similarity index 86% rename from enos/enos-scenario-e2e-docker-base-with-vault.hcl rename to enos/enos-scenario-e2e-docker.hcl index 765ac527876..31ea39d396a 100644 --- a/enos/enos-scenario-e2e-docker-base-with-vault.hcl +++ b/enos/enos-scenario-e2e-docker.hcl @@ -4,10 +4,11 @@ # For this scenario to work, add the following line to /etc/hosts # 127.0.0.1 localhost boundary -scenario "e2e_docker_base_with_vault" { +scenario "e2e_docker" { terraform_cli = terraform_cli.default terraform = terraform.default providers = [ + provider.aws.default, provider.enos.default ] @@ -19,8 +20,10 @@ scenario "e2e_docker_base_with_vault" { aws_ssh_private_key_path = abspath(var.aws_ssh_private_key_path) local_boundary_dir = abspath(var.local_boundary_dir) boundary_docker_image_file = abspath(var.boundary_docker_image_file) - license_path = abspath(var.boundary_license_path != null ? var.boundary_license_path : joinpath(path.root, "./support/boundary.hclic")) - + build_path = { + "local" = "/tmp", + "crt" = var.crt_bundle_path == null ? null : abspath(var.crt_bundle_path) + } tags = merge({ "Project Name" : var.project_name "Project" : "Enos", @@ -51,15 +54,6 @@ scenario "e2e_docker_base_with_vault" { module = module.docker_postgres } - step "read_license" { - skip_step = var.boundary_edition == "oss" - module = module.read_license - - variables { - file_name = local.license_path - } - } - step "create_boundary" { module = module.docker_boundary depends_on = [ @@ -71,7 +65,6 @@ scenario "e2e_docker_base_with_vault" { image_name = matrix.builder == "crt" ? var.boundary_docker_image_name : step.build_boundary_docker_image.image_name network_name = step.create_docker_network.network_name postgres_address = step.create_boundary_database.address - boundary_license = var.boundary_edition != "oss" ? 
step.read_license.license : "" } } @@ -108,7 +101,7 @@ scenario "e2e_docker_base_with_vault" { ] variables { - test_package = "github.com/hashicorp/boundary/testing/internal/e2e/tests/base_with_vault" + test_package = "github.com/hashicorp/boundary/testing/internal/e2e/tests/static_with_vault" debug_no_run = var.e2e_debug_no_run alb_boundary_api_addr = step.create_boundary.address auth_method_id = step.create_boundary.auth_method_id @@ -122,7 +115,6 @@ scenario "e2e_docker_base_with_vault" { vault_addr = step.create_vault.address vault_addr_internal = step.create_vault.address_internal vault_root_token = step.create_vault.token - vault_port = step.create_vault.port } } diff --git a/enos/enos-scenario-e2e-aws-base-with-vault.hcl b/enos/enos-scenario-e2e-static-with-vault.hcl similarity index 98% rename from enos/enos-scenario-e2e-aws-base-with-vault.hcl rename to enos/enos-scenario-e2e-static-with-vault.hcl index 6049922849f..0f22d850a75 100644 --- a/enos/enos-scenario-e2e-aws-base-with-vault.hcl +++ b/enos/enos-scenario-e2e-static-with-vault.hcl @@ -1,7 +1,7 @@ # Copyright (c) HashiCorp, Inc. # SPDX-License-Identifier: MPL-2.0 -scenario "e2e_aws_base_with_vault" { +scenario "e2e_static_with_vault" { terraform_cli = terraform_cli.default terraform = terraform.default providers = [ @@ -144,7 +144,7 @@ scenario "e2e_aws_base_with_vault" { ] variables { - test_package = "github.com/hashicorp/boundary/testing/internal/e2e/tests/base_with_vault" + test_package = "github.com/hashicorp/boundary/testing/internal/e2e/tests/static_with_vault" debug_no_run = var.e2e_debug_no_run alb_boundary_api_addr = step.create_boundary_cluster.alb_boundary_api_addr auth_method_id = step.create_boundary_cluster.auth_method_id diff --git a/enos/enos-scenario-e2e-aws-base.hcl b/enos/enos-scenario-e2e-static.hcl similarity index 98% rename from enos/enos-scenario-e2e-aws-base.hcl rename to enos/enos-scenario-e2e-static.hcl index e1ed105ce3e..d1465097236 100644 --- a/enos/enos-scenario-e2e-aws-base.hcl +++ b/enos/enos-scenario-e2e-static.hcl @@ -1,7 +1,7 @@ # Copyright (c) HashiCorp, Inc. 
# SPDX-License-Identifier: MPL-2.0 -scenario "e2e_aws_base" { +scenario "e2e_static" { terraform_cli = terraform_cli.default terraform = terraform.default providers = [ @@ -121,7 +121,7 @@ scenario "e2e_aws_base" { ] variables { - test_package = "github.com/hashicorp/boundary/testing/internal/e2e/tests/base" + test_package = "github.com/hashicorp/boundary/testing/internal/e2e/tests/static" debug_no_run = var.e2e_debug_no_run alb_boundary_api_addr = step.create_boundary_cluster.alb_boundary_api_addr auth_method_id = step.create_boundary_cluster.auth_method_id diff --git a/enos/enos-variables.hcl b/enos/enos-variables.hcl index 1c1c7d7cdba..4c77b8d285e 100644 --- a/enos/enos-variables.hcl +++ b/enos/enos-variables.hcl @@ -178,8 +178,3 @@ variable "go_test_timeout" { type = string default = "10m" } - -variable "aws_region" { - description = "AWS region where the resources will be created" - type = string -} diff --git a/enos/modules/docker_boundary/init.sh b/enos/modules/docker_boundary/init.sh index d5a2233979f..f5b72d0483a 100644 --- a/enos/modules/docker_boundary/init.sh +++ b/enos/modules/docker_boundary/init.sh @@ -15,7 +15,6 @@ docker run \ --rm \ --name $TEST_CONTAINER_NAME \ -e "BOUNDARY_POSTGRES_URL=$TEST_DATABASE_ADDRESS" \ - -e "BOUNDARY_LICENSE=$TEST_BOUNDARY_LICENSE" \ -e "SKIP_CHOWN=true" \ --cap-add IPC_LOCK \ --mount type=bind,src=$SOURCE,dst=/boundary/ \ diff --git a/enos/modules/docker_boundary/main.tf b/enos/modules/docker_boundary/main.tf index ff45af45f32..64aee832f26 100644 --- a/enos/modules/docker_boundary/main.tf +++ b/enos/modules/docker_boundary/main.tf @@ -36,10 +36,6 @@ variable "postgres_address" { description = "Address to postgres database" type = string } -variable "boundary_license" { - description = "License string" - type = string -} resource "docker_image" "boundary" { @@ -52,7 +48,6 @@ resource "enos_local_exec" "init_database" { TEST_BOUNDARY_IMAGE = var.image_name, TEST_DATABASE_ADDRESS = var.postgres_address, TEST_NETWORK_NAME = var.network_name - TEST_BOUNDARY_LICENSE = var.boundary_license } inline = ["bash ./${path.module}/init.sh"] } @@ -73,7 +68,6 @@ resource "docker_container" "boundary" { command = ["boundary", "server", "-config", "/boundary/boundary-config.hcl"] env = [ "BOUNDARY_POSTGRES_URL=${var.postgres_address}", - "BOUNDARY_LICENSE=${var.boundary_license}", "HOSTNAME=boundary", "SKIP_CHOWN=true", ] diff --git a/enos/modules/docker_vault/main.tf b/enos/modules/docker_vault/main.tf index d128388734e..b5635da3b47 100644 --- a/enos/modules/docker_vault/main.tf +++ b/enos/modules/docker_vault/main.tf @@ -37,11 +37,6 @@ variable "vault_token" { type = string default = "boundarytok" } -variable "vault_port" { - description = "External Port to use" - type = string - default = "8300" -} resource "docker_image" "vault" { name = var.image_name @@ -56,7 +51,7 @@ resource "docker_container" "vault" { ] ports { internal = 8200 - external = var.vault_port + external = 8200 } capabilities { add = ["IPC_LOCK"] @@ -71,7 +66,7 @@ resource "enos_local_exec" "check_address" { docker_container.vault ] - inline = ["timeout 10s bash -c 'until curl http://0.0.0.0:${var.vault_port}; do sleep 2; done'"] + inline = ["timeout 10s bash -c 'until curl http://0.0.0.0:8200; do sleep 2; done'"] } resource "enos_local_exec" "check_health" { @@ -80,7 +75,7 @@ resource "enos_local_exec" "check_health" { ] environment = { - VAULT_ADDR = "http://0.0.0.0:${var.vault_port}" + VAULT_ADDR = "http://0.0.0.0:8200" VAULT_TOKEN = var.vault_token } @@ -98,7 +93,3 @@ output 
"address_internal" { output "token" { value = var.vault_token } - -output "port" { - value = var.vault_port -} diff --git a/enos/modules/test_e2e/main.tf b/enos/modules/test_e2e/main.tf index b3c34e8e496..97be1d8cf4a 100644 --- a/enos/modules/test_e2e/main.tf +++ b/enos/modules/test_e2e/main.tf @@ -77,11 +77,6 @@ variable "vault_root_token" { type = string default = "" } -variable "vault_port" { - description = "External Port that vault instance is attached to (outside of docker network)" - type = string - default = "8200" -} variable "aws_access_key_id" { description = "Access Key Id for AWS IAM user used in dynamic host catalogs" type = string @@ -117,16 +112,6 @@ variable "aws_host_set_ips2" { type = list(string) default = [""] } -variable "aws_region" { - description = "AWS region where the resources will be created" - type = string - default = "" -} -variable "aws_bucket_name" { - description = "AWS S3 bucket name" - type = string - default = "" -} variable "worker_tags" { type = list(string) default = [""] @@ -138,7 +123,7 @@ variable "test_timeout" { locals { aws_ssh_private_key_path = abspath(var.aws_ssh_private_key_path) - vault_addr = var.vault_addr != "" ? "http://${var.vault_addr}:${var.vault_port}" : "" + vault_addr = var.vault_addr != "" ? "http://${var.vault_addr}:8200" : "" vault_addr_internal = var.vault_addr_internal != "" ? "http://${var.vault_addr_internal}:8200" : local.vault_addr aws_host_set_ips1 = jsonencode(var.aws_host_set_ips1) aws_host_set_ips2 = jsonencode(var.aws_host_set_ips2) @@ -164,15 +149,11 @@ resource "enos_local_exec" "run_e2e_test" { E2E_AWS_HOST_SET_FILTER = var.aws_host_set_filter1, E2E_AWS_HOST_SET_IPS = local.aws_host_set_ips1, E2E_AWS_HOST_SET_FILTER2 = var.aws_host_set_filter2, - E2E_AWS_HOST_SET_IPS2 = local.aws_host_set_ips2, - E2E_AWS_REGION = var.aws_region, - E2E_AWS_BUCKET_NAME = var.aws_bucket_name, + E2E_AWS_HOST_SET_IPS2 = local.aws_host_set_ips2 E2E_WORKER_TAG = jsonencode(var.worker_tags), } - inline = var.debug_no_run ? [""] : [ - "set -o pipefail; PATH=\"${var.local_boundary_dir}:$PATH\" go test -v ${var.test_package} -count=1 -json -timeout ${var.test_timeout}| tparse -follow -format plain 2>&1 | tee ${path.module}/../../test-e2e-${local.package_name}.log" - ] + inline = var.debug_no_run ? [""] : ["set -o pipefail; PATH=\"${var.local_boundary_dir}:$PATH\" go test -v ${var.test_package} -count=1 -json -timeout ${var.test_timeout}| tparse -follow -format plain 2>&1 | tee ${path.module}/../../test-e2e-${local.package_name}.log"] } output "test_results" { diff --git a/enos/modules/test_e2e_ui/main.tf b/enos/modules/test_e2e_ui/main.tf index 4dcb40d93a3..e05e47542c6 100644 --- a/enos/modules/test_e2e_ui/main.tf +++ b/enos/modules/test_e2e_ui/main.tf @@ -77,11 +77,6 @@ variable "vault_root_token" { type = string default = "" } -variable "vault_port" { - description = "External Port that vault instance is attached to (outside of docker network)" - type = string - default = "8200" -} variable "aws_access_key_id" { description = "Access Key Id for AWS IAM user used in dynamic host catalogs" type = string @@ -120,7 +115,7 @@ variable "aws_host_set_ips2" { locals { aws_ssh_private_key_path = abspath(var.aws_ssh_private_key_path) - vault_addr = var.vault_addr != "" ? "http://${var.vault_addr}:${var.vault_port}" : "" + vault_addr = var.vault_addr != "" ? "http://${var.vault_addr}:8200" : "" vault_addr_internal = var.vault_addr_internal != "" ? 
"http://${var.vault_addr_internal}:8200" : local.vault_addr aws_host_set_ips1 = jsonencode(var.aws_host_set_ips1) aws_host_set_ips2 = jsonencode(var.aws_host_set_ips2) diff --git a/enos/modules/worker/main.tf b/enos/modules/worker/main.tf index 27bed20cfd4..6d335792076 100644 --- a/enos/modules/worker/main.tf +++ b/enos/modules/worker/main.tf @@ -173,14 +173,13 @@ resource "enos_file" "worker_config" { depends_on = [enos_bundle_install.worker] destination = "/etc/boundary/boundary.hcl" - content = templatefile("${path.module}/${var.config_file_path}", { - id = random_pet.worker.id - kms_key_id = data.aws_kms_key.kms_key.id - public_addr = aws_instance.worker.public_ip - type = jsonencode(var.worker_type_tags) - region = data.aws_availability_zone.worker_az.region - controller_addresses = jsonencode(var.controller_addresses) - recording_storage_path = var.recording_storage_path + content = templatefile("${path.module}/templates/worker.hcl", { + id = random_pet.worker.id + kms_key_id = data.aws_kms_key.kms_key.id + public_addr = aws_instance.worker.public_ip + type = jsonencode(var.worker_type_tags) + region = data.aws_availability_zone.worker_az.region + controller_addresses = jsonencode(var.controller_addresses) }) transport = { @@ -196,9 +195,8 @@ resource "enos_boundary_start" "worker_start" { aws_vpc_security_group_ingress_rule.worker_to_controller, ] - bin_path = "/opt/boundary/bin" - config_path = "/etc/boundary" - recording_storage_path = var.recording_storage_path != "" ? var.recording_storage_path : null + bin_path = "/opt/boundary/bin" + config_path = "/etc/boundary" transport = { ssh = { host = aws_instance.worker.public_ip diff --git a/enos/modules/worker/templates/worker_bsr.hcl b/enos/modules/worker/templates/worker_bsr.hcl deleted file mode 100644 index 1ef20c32d5b..00000000000 --- a/enos/modules/worker/templates/worker_bsr.hcl +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) HashiCorp, Inc. 
-# SPDX-License-Identifier: MPL-2.0 - -listener "tcp" { - purpose = "proxy" - tls_disable = true - address = "0.0.0.0" -} - -worker { - # Name attr must be unique across workers - name = "worker-${id}" - description = "Enos Boundary worker ${id}" - - # Workers must be able to reach controllers on :9201 - initial_upstreams = ${controller_addresses} - - public_addr = "${public_addr}" - - tags { - region = ["${region}"] - type = ${type} - } - - recording_storage_path = "${recording_storage_path}" -} - -# must be same key as used on controller config -kms "awskms" { - purpose = "worker-auth" - region = "${region}" - kms_key_id = "${kms_key_id}" -} diff --git a/enos/modules/worker/variables.tf b/enos/modules/worker/variables.tf index f0d7cb111a4..17ff98496a3 100644 --- a/enos/modules/worker/variables.tf +++ b/enos/modules/worker/variables.tf @@ -129,15 +129,3 @@ variable "controller_sg_id" { description = "The controllers' security group ID for adding rules allowing this worker to communicate with them" type = string } - -variable "config_file_path" { - description = "Path to a config file (relative to module directory)" - type = string - default = "templates/worker.hcl" -} - -variable "recording_storage_path" { - description = "Path on instance to store recordings" - type = string - default = "" -} diff --git a/globals/fields.go b/globals/fields.go index 2b8d3e5eb96..f5ac55674a5 100644 --- a/globals/fields.go +++ b/globals/fields.go @@ -5,7 +5,6 @@ package globals const ( IdField = "id" - IdsField = "ids" VersionField = "version" NameField = "name" DescriptionField = "description" diff --git a/globals/prefixes.go b/globals/prefixes.go index c1bd26e7336..3e8adae7deb 100644 --- a/globals/prefixes.go +++ b/globals/prefixes.go @@ -163,14 +163,6 @@ var prefixToResourceType = map[string]resource.Type{ SessionRecordingPrefix: resource.SessionRecording, } -var resourceTypeToPrefixes map[resource.Type][]string = func() map[resource.Type][]string { - ret := make(map[resource.Type][]string) - for k, v := range prefixToResourceType { - ret[v] = append(ret[v], k) - } - return ret -}() - // ResourceTypeFromPrefix takes in a resource ID (or a prefix) and returns the // corresponding resource typ func ResourceTypeFromPrefix(in string) resource.Type { @@ -178,9 +170,3 @@ func ResourceTypeFromPrefix(in string) resource.Type { in, _, _ = strings.Cut(in, "_") return prefixToResourceType[in] } - -// ResourcePrefixesFromType returns the known prefixes for a given type; if a -// type is not known the return value will be nil -func ResourcePrefixesFromType(in resource.Type) []string { - return resourceTypeToPrefixes[in] -} diff --git a/go.mod b/go.mod index b971f6e24ce..ebdda08fdd4 100644 --- a/go.mod +++ b/go.mod @@ -97,7 +97,7 @@ require ( github.com/hashicorp/go-version v1.6.0 github.com/hashicorp/nodeenrollment v0.2.4 github.com/jackc/pgx/v5 v5.3.1 - github.com/jimlambrt/gldap v0.1.7 + github.com/jimlambrt/gldap v0.1.6 github.com/kelseyhightower/envconfig v1.4.0 github.com/mikesmitty/edkey v0.0.0-20170222072505-3356ea4e686a golang.org/x/exp v0.0.0-20230425010034-47ecfdc1ba53 diff --git a/go.sum b/go.sum index 4b2f59cc611..37187934e27 100644 --- a/go.sum +++ b/go.sum @@ -851,8 +851,8 @@ github.com/jefferai/keyring v1.1.7-0.20220316160357-58a74bb55891/go.mod h1:iwmrB github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= github.com/jhump/protoreflect v1.9.1-0.20210817181203-db1a327a393e 
h1:Yb4fEGk+GtBSNuvy5rs0ZJt/jtopc/z9azQaj3xbies= -github.com/jimlambrt/gldap v0.1.7 h1:q6W1xyjnHax/JAhjsN/EQ88+DCOEYPy/GDM7/3tk7bA= -github.com/jimlambrt/gldap v0.1.7/go.mod h1:BRdefIDhx2uYBjxL0fRBGi3eyOvAkkRIXSJYMCyzCaI= +github.com/jimlambrt/gldap v0.1.6 h1:fnpRGhuHxWjavhDvEjwwveneNrQZuoEfOoDxKWYmtF8= +github.com/jimlambrt/gldap v0.1.6/go.mod h1:BRdefIDhx2uYBjxL0fRBGi3eyOvAkkRIXSJYMCyzCaI= github.com/jinzhu/gorm v1.9.12 h1:Drgk1clyWT9t9ERbzHza6Mj/8FY/CqMyVzOiHviMo6Q= github.com/jinzhu/gorm v1.9.12/go.mod h1:vhTjlKSJUTWNtcbQtrMBFCxy7eXTzeCAzfL5fBZT/Qs= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= diff --git a/internal/api/genapi/input.go b/internal/api/genapi/input.go index 2a5d99146e4..fe51a698ec4 100644 --- a/internal/api/genapi/input.go +++ b/internal/api/genapi/input.go @@ -80,11 +80,6 @@ type structInfo struct { // the attributes map. subtypeName string - // subtype specifies exactly the value expected in the resource's "type" - // Field. This is used when checking if the attributes returned can be - // marshaled into a specific generated attributes struct. - subtype string - // For non-top-level collections, this can be used to indicate the name of // the argument that should be used parentTypeName string @@ -491,7 +486,7 @@ var inputStructs = []*structInfo{ listTemplate, }, pluralResourceName: "auth-tokens", - createResponseTypes: []string{ReadResponseType, UpdateResponseType, DeleteResponseType, ListResponseType}, + createResponseTypes: []string{CreateResponseType, ReadResponseType, UpdateResponseType, DeleteResponseType, ListResponseType}, recursiveListing: true, }, // Credentials @@ -554,7 +549,6 @@ var inputStructs = []*structInfo{ inProto: &credentiallibraries.VaultCredentialLibraryAttributes{}, outFile: "credentiallibraries/vault_credential_library_attributes.gen.go", subtypeName: "VaultCredentialLibrary", - subtype: "vault-generic", fieldOverrides: []fieldInfo{ { Name: "Path", @@ -570,7 +564,6 @@ var inputStructs = []*structInfo{ inProto: &credentiallibraries.VaultSSHCertificateCredentialLibraryAttributes{}, outFile: "credentiallibraries/vault_ssh_certificate_credential_library_attributes.gen.go", subtypeName: "VaultSSHCertificateCredentialLibrary", - subtype: "vault-ssh-certificate", fieldOverrides: []fieldInfo{ { Name: "Path", @@ -633,7 +626,6 @@ var inputStructs = []*structInfo{ inProto: &credentials.UsernamePasswordAttributes{}, outFile: "credentials/username_password_attributes.gen.go", subtypeName: "UsernamePasswordCredential", - subtype: "username_password", fieldOverrides: []fieldInfo{ { Name: "Username", @@ -653,7 +645,6 @@ var inputStructs = []*structInfo{ inProto: &credentials.SshPrivateKeyAttributes{}, outFile: "credentials/ssh_private_key_attributes.gen.go", subtypeName: "SshPrivateKeyCredential", - subtype: "ssh_private_key", fieldOverrides: []fieldInfo{ { Name: "Username", @@ -1055,82 +1046,6 @@ var inputStructs = []*structInfo{ inProto: &session_recordings.HostCatalog{}, outFile: "sessionrecordings/host_catalog.gen.go", }, - { - inProto: &session_recordings.Credential{}, - outFile: "sessionrecordings/credential.gen.go", - }, - { - inProto: &session_recordings.UsernamePasswordCredentialAttributes{}, - outFile: "sessionrecordings/username_password_credential_attributes.gen.go", - subtype: "username_password", - parentTypeName: "Credential", - templates: []*template.Template{ - mapstructureConversionTemplate, - }, - }, - { - inProto: &session_recordings.JsonCredentialAttributes{}, - outFile: 
"sessionrecordings/json_credential_attributes.gen.go", - subtype: "json", - parentTypeName: "Credential", - templates: []*template.Template{ - mapstructureConversionTemplate, - }, - }, - { - inProto: &session_recordings.SshPrivateKeyCredentialAttributes{}, - outFile: "sessionrecordings/ssh_private_key_credential_attributes.gen.go", - subtype: "ssh_private_key", - parentTypeName: "Credential", - templates: []*template.Template{ - mapstructureConversionTemplate, - }, - }, - { - inProto: &session_recordings.CredentialLibrary{}, - outFile: "sessionrecordings/credential_library.gen.go", - }, - { - inProto: &session_recordings.VaultCredentialLibraryAttributes{}, - outFile: "sessionrecordings/vault_credential_library_attributes.gen.go", - subtype: "vault-generic", - parentTypeName: "CredentialLibrary", - templates: []*template.Template{ - mapstructureConversionTemplate, - }, - }, - { - inProto: &session_recordings.VaultSSHCertificateCredentialLibraryAttributes{}, - outFile: "sessionrecordings/vault_ssh_certificate_credential_library_attributes.gen.go", - subtype: "vault-ssh-certificate", - parentTypeName: "CredentialLibrary", - fieldOverrides: []fieldInfo{ - { - Name: "CriticalOptions", - FieldType: "map[string]string", - }, - { - Name: "Extensions", - FieldType: "map[string]string", - }, - }, - templates: []*template.Template{ - mapstructureConversionTemplate, - }, - }, - { - inProto: &session_recordings.CredentialStore{}, - outFile: "sessionrecordings/credential_store.gen.go", - }, - { - inProto: &session_recordings.VaultCredentialStoreAttributes{}, - outFile: "sessionrecordings/vault_credential_store_attributes.gen.go", - subtype: "vault", - parentTypeName: "CredentialStore", - templates: []*template.Template{ - mapstructureConversionTemplate, - }, - }, { inProto: &session_recordings.ValuesAtTime{}, outFile: "sessionrecordings/values_at_time.gen.go", diff --git a/internal/api/genapi/templates.go b/internal/api/genapi/templates.go index 8d2db908320..3e7ead32fac 100644 --- a/internal/api/genapi/templates.go +++ b/internal/api/genapi/templates.go @@ -53,7 +53,6 @@ type templateInput struct { CreateResponseTypes []string SkipListFiltering bool RecursiveListing bool - Subtype string } func fillTemplates() { @@ -73,7 +72,6 @@ func fillTemplates() { CreateResponseTypes: in.createResponseTypes, SkipListFiltering: in.skipListFiltering, RecursiveListing: in.recursiveListing, - Subtype: in.subtype, } if in.packageOverride != "" { input.Package = in.packageOverride @@ -852,8 +850,8 @@ func AttributesMapTo{{ .Name }}(in map[string]interface{}) (*{{ .Name }}, error) } func (pt *{{ .ParentTypeName }}) Get{{ .Name }}() (*{{ .Name }}, error) { - if pt.Type != "{{ typeFromSubtype .Subtype .Name .ParentTypeName "Attributes"}}" { - return nil, fmt.Errorf("asked to fetch %s-type attributes but {{ kebabCase .ParentTypeName }} is of type %s", "{{ typeFromSubtype .Subtype .Name .ParentTypeName "Attributes"}}", pt.Type) + if pt.Type != "{{ typeFromSubtype .Name .ParentTypeName "Attributes"}}" { + return nil, fmt.Errorf("asked to fetch %s-type attributes but {{ kebabCase .ParentTypeName }} is of type %s", "{{ typeFromSubtype .Name .ParentTypeName "Attributes"}}", pt.Type) } return AttributesMapTo{{ .Name }}(pt.Attributes) } @@ -897,9 +895,6 @@ func removeDups(in []string) []string { return ret } -func typeFromSubtype(subtype, in, parent, extraSuffix string) string { - if subtype != "" { - return subtype - } +func typeFromSubtype(in, parent, extraSuffix string) string { return 
strings.ToLower(strings.TrimSuffix(strings.TrimSuffix(in, extraSuffix), parent)) } diff --git a/internal/auth/additional_verification_test.go b/internal/auth/additional_verification_test.go index 803921a681b..2f24998d4c3 100644 --- a/internal/auth/additional_verification_test.go +++ b/internal/auth/additional_verification_test.go @@ -216,8 +216,7 @@ func TestSelfReadingDifferentOutputFields(t *testing.T) { conn := tc.DbConn() - s, err := authmethodsservice.NewService(tc.Context(), - tc.Kms(), + s, err := authmethodsservice.NewService(tc.Kms(), tc.Controller().PasswordAuthRepoFn, tc.Controller().OidcRepoFn, tc.Controller().IamRepoFn, diff --git a/internal/auth/ldap/ids.go b/internal/auth/ldap/ids.go index 9c1db171339..2d5dd7764b7 100644 --- a/internal/auth/ldap/ids.go +++ b/internal/auth/ldap/ids.go @@ -25,7 +25,7 @@ const ( func newAuthMethodId(ctx context.Context) (string, error) { const op = "ldap.newAuthMethodId" - id, err := db.NewPublicId(ctx, globals.LdapAuthMethodPrefix) + id, err := db.NewPublicId(globals.LdapAuthMethodPrefix) if err != nil { return "", errors.Wrap(ctx, err, op) } @@ -35,7 +35,7 @@ func newAuthMethodId(ctx context.Context) (string, error) { func newAccountId(ctx context.Context, authMethodId, loginName string) (string, error) { const op = "ldap.newAccountId" // there's a unique index on: auth method id + login name - id, err := db.NewPublicId(ctx, globals.LdapAccountPrefix, db.WithPrngValues([]string{authMethodId, loginName})) + id, err := db.NewPublicId(globals.LdapAccountPrefix, db.WithPrngValues([]string{authMethodId, loginName})) if err != nil { return "", errors.Wrap(ctx, err, op) } @@ -44,7 +44,7 @@ func newAccountId(ctx context.Context, authMethodId, loginName string) (string, func newManagedGroupId(ctx context.Context) (string, error) { const op = "ldap.newManagedGroupId" - id, err := db.NewPublicId(ctx, globals.LdapManagedGroupPrefix) + id, err := db.NewPublicId(globals.LdapManagedGroupPrefix) if err != nil { return "", errors.Wrap(ctx, err, op) } diff --git a/internal/auth/ldap/service_authenticate_test.go b/internal/auth/ldap/service_authenticate_test.go index 133419dd1ea..b8d67c50c7a 100644 --- a/internal/auth/ldap/service_authenticate_test.go +++ b/internal/auth/ldap/service_authenticate_test.go @@ -33,10 +33,10 @@ func TestAuthenticate(t *testing.T) { return NewRepository(testCtx, testRw, testRw, testKms) } lookupUserWithFn := func() (LookupUser, error) { - return iam.NewRepository(testCtx, testRw, testRw, testKms) + return iam.NewRepository(testRw, testRw, testKms) } tokenCreatorFn := func() (AuthTokenCreator, error) { - return authtoken.NewRepository(testCtx, testRw, testRw, testKms) + return authtoken.NewRepository(testRw, testRw, testKms) } iamRepo := iam.TestRepo(t, testConn, rootWrapper) org, _ := iam.TestScopes(t, iamRepo) diff --git a/internal/auth/oidc/ids.go b/internal/auth/oidc/ids.go index 39bd6f6a197..2c62bd53203 100644 --- a/internal/auth/oidc/ids.go +++ b/internal/auth/oidc/ids.go @@ -25,7 +25,7 @@ const ( func newAuthMethodId(ctx context.Context) (string, error) { const op = "oidc.newAuthMethodId" - id, err := db.NewPublicId(ctx, globals.OidcAuthMethodPrefix) + id, err := db.NewPublicId(globals.OidcAuthMethodPrefix) if err != nil { return "", errors.Wrap(ctx, err, op) } @@ -43,7 +43,7 @@ func newAccountId(ctx context.Context, authMethodId, issuer, sub string) (string if sub == "" { return "", errors.New(ctx, errors.InvalidParameter, op, "missing subject") } - id, err := db.NewPublicId(ctx, globals.OidcAccountPrefix, 
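// Editor's sketch (illustrative, not part of the patch): with the explicit
// "subtype" override removed from structInfo and the template input, the type
// string checked by the generated Get<Name>() helper is derived purely from
// the struct name by the retained typeFromSubtype above. A standalone version
// of that derivation, with hypothetical inputs:
package main

import (
	"fmt"
	"strings"
)

func typeFromName(in, parent, extraSuffix string) string {
	// strip the trailing suffix (e.g. "Attributes"), then the parent type
	// name, and lower-case whatever is left
	return strings.ToLower(strings.TrimSuffix(strings.TrimSuffix(in, extraSuffix), parent))
}

func main() {
	// hypothetical example: "VaultCredentialLibraryAttributes" under parent
	// "CredentialLibrary" reduces to "vault"
	fmt.Println(typeFromName("VaultCredentialLibraryAttributes", "CredentialLibrary", "Attributes"))
}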
db.WithPrngValues([]string{authMethodId, issuer, sub})) + id, err := db.NewPublicId(globals.OidcAccountPrefix, db.WithPrngValues([]string{authMethodId, issuer, sub})) if err != nil { return "", errors.Wrap(ctx, err, op) } @@ -52,7 +52,7 @@ func newAccountId(ctx context.Context, authMethodId, issuer, sub string) (string func newManagedGroupId(ctx context.Context) (string, error) { const op = "oidc.newManagedGroupId" - id, err := db.NewPublicId(ctx, globals.OidcManagedGroupPrefix) + id, err := db.NewPublicId(globals.OidcManagedGroupPrefix) if err != nil { return "", errors.Wrap(ctx, err, op) } diff --git a/internal/auth/oidc/service_callback_test.go b/internal/auth/oidc/service_callback_test.go index cdfeba68a4c..bed94512f10 100644 --- a/internal/auth/oidc/service_callback_test.go +++ b/internal/auth/oidc/service_callback_test.go @@ -43,13 +43,13 @@ func Test_Callback(t *testing.T) { // some standard factories for unit tests which // are used in the Callback(...) call iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } repoFn := func() (*Repository, error) { return NewRepository(ctx, rw, rw, kmsCache) } atRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kmsCache) + return authtoken.NewRepository(rw, rw, kmsCache) } atRepo, err := atRepoFn() require.NoError(t, err) @@ -92,7 +92,7 @@ func Test_Callback(t *testing.T) { require.NoError(t, err) // a reusable token request id for the tests. - testTokenRequestId, err := authtoken.NewAuthTokenId(ctx) + testTokenRequestId, err := authtoken.NewAuthTokenId() require.NoError(t, err) // usuable nonce for the unit tests @@ -515,13 +515,13 @@ func Test_StartAuth_to_Callback(t *testing.T) { // func pointers for the test controller. iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } repoFn := func() (*Repository, error) { return NewRepository(ctx, rw, rw, kmsCache) } atRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kmsCache) + return authtoken.NewRepository(rw, rw, kmsCache) } atRepo, err := atRepoFn() require.NoError(err) @@ -619,13 +619,13 @@ func Test_ManagedGroupFiltering(t *testing.T) { // some standard factories for unit tests which // are used in the Callback(...) 
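// Editor's sketch (not part of the patch): after this hunk the ldap and oidc
// id helpers call db.NewPublicId without a context argument, as the "+" lines
// above show. A minimal helper in that post-patch shape, assuming the same
// imports as oidc/ids.go (db, errors, globals); the name is hypothetical:
func newExampleAccountId(ctx context.Context, authMethodId, issuer, sub string) (string, error) {
	const op = "oidc.newExampleAccountId"
	// WithPrngValues feeds the listed values into id generation; the ldap
	// helper pairs the same option with a unique index on auth method id +
	// login name.
	id, err := db.NewPublicId(globals.OidcAccountPrefix, db.WithPrngValues([]string{authMethodId, issuer, sub}))
	if err != nil {
		return "", errors.Wrap(ctx, err, op)
	}
	return id, nil
}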
call iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } repoFn := func() (*Repository, error) { return NewRepository(ctx, rw, rw, kmsCache) } atRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kmsCache) + return authtoken.NewRepository(rw, rw, kmsCache) } iamRepo := iam.TestRepo(t, conn, rootWrapper) @@ -753,7 +753,7 @@ func Test_ManagedGroupFiltering(t *testing.T) { assert, require := assert.New(t), require.New(t) // A unique token ID for each test - testTokenRequestId, err := authtoken.NewAuthTokenId(ctx) + testTokenRequestId, err := authtoken.NewAuthTokenId() require.NoError(err) // the test provider is stateful, so we need to configure diff --git a/internal/auth/oidc/service_start_auth.go b/internal/auth/oidc/service_start_auth.go index 047b0b681f5..e7bb932fdd6 100644 --- a/internal/auth/oidc/service_start_auth.go +++ b/internal/auth/oidc/service_start_auth.go @@ -73,7 +73,7 @@ func StartAuth(ctx context.Context, oidcRepoFn OidcRepoFactory, authMethodId str now := time.Now() createTime := timestamppb.New(now.Truncate(time.Second)) exp := timestamppb.New(now.Add(AttemptExpiration).Truncate(time.Second)) - tokenRequestId, err := authtoken.NewAuthTokenId(ctx) + tokenRequestId, err := authtoken.NewAuthTokenId() if err != nil { return nil, "", errors.Wrap(ctx, err, op) } diff --git a/internal/auth/oidc/service_token_request_test.go b/internal/auth/oidc/service_token_request_test.go index e2a3c1abab6..8c60737cc6c 100644 --- a/internal/auth/oidc/service_token_request_test.go +++ b/internal/auth/oidc/service_token_request_test.go @@ -38,11 +38,11 @@ func Test_TokenRequest(t *testing.T) { require.NoError(t, err) atRepoFn := func() (*authtoken.Repository, error) { - r, err := authtoken.NewRepository(ctx, rw, rw, kmsCache) + r, err := authtoken.NewRepository(rw, rw, kmsCache) require.NoError(t, err) return r, nil } - testAtRepo, err := authtoken.NewRepository(ctx, rw, rw, kmsCache) + testAtRepo, err := authtoken.NewRepository(rw, rw, kmsCache) require.NoError(t, err) // a reusable test authmethod for the unit tests @@ -111,7 +111,7 @@ func Test_TokenRequest(t *testing.T) { atRepoFn: atRepoFn, authMethodId: "", tokenRequest: func() string { - tokenPublicId, err := authtoken.NewAuthTokenId(ctx) + tokenPublicId, err := authtoken.NewAuthTokenId() require.NoError(t, err) TestPendingToken(t, testAtRepo, testUser, testAcct, tokenPublicId) return TestTokenRequestId(t, testAuthMethod, kmsCache, 200*time.Second, tokenPublicId) @@ -141,7 +141,7 @@ func Test_TokenRequest(t *testing.T) { atRepoFn: atRepoFn, authMethodId: testAuthMethod.PublicId, tokenRequest: func() string { - tokenPublicId, err := authtoken.NewAuthTokenId(ctx) + tokenPublicId, err := authtoken.NewAuthTokenId() require.NoError(t, err) TestPendingToken(t, testAtRepo, testUser, testAcct, tokenPublicId) return TestTokenRequestId(t, testAuthMethod, kmsCache, 200*time.Second, tokenPublicId) @@ -155,7 +155,7 @@ func Test_TokenRequest(t *testing.T) { atRepoFn: atRepoFn, authMethodId: testAuthMethod.PublicId, tokenRequest: func() string { - tokenPublicId, err := authtoken.NewAuthTokenId(ctx) + tokenPublicId, err := authtoken.NewAuthTokenId() require.NoError(t, err) TestPendingToken(t, testAtRepo, testUser, testAcct, tokenPublicId) return TestTokenRequestId(t, testAuthMethod, kmsCache, 0, tokenPublicId) @@ -171,7 +171,7 @@ func Test_TokenRequest(t *testing.T) { }, authMethodId: testAuthMethod.PublicId, 
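// Editor's sketch (not part of the patch): the oidc tests above hand their
// dependencies to the service layer as factory closures rather than concrete
// repositories. In the post-patch signatures shown on the "+" lines (rw and
// kmsCache come from the tests' db/kms setup):
iamRepoFn := func() (*iam.Repository, error) {
	return iam.NewRepository(rw, rw, kmsCache)
}
atRepoFn := func() (*authtoken.Repository, error) {
	return authtoken.NewRepository(rw, rw, kmsCache)
}
// Consumers such as StartAuth take these factories and invoke them to obtain
// a repository, which also lets individual test cases substitute their own
// factory, as some cases above do.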
tokenRequest: func() string { - tokenPublicId, err := authtoken.NewAuthTokenId(ctx) + tokenPublicId, err := authtoken.NewAuthTokenId() require.NoError(t, err) TestPendingToken(t, testAtRepo, testUser, testAcct, tokenPublicId) return TestTokenRequestId(t, testAuthMethod, kmsCache, 200*time.Second, tokenPublicId) @@ -210,7 +210,7 @@ func Test_TokenRequest(t *testing.T) { atRepoFn: atRepoFn, authMethodId: testAuthMethod.PublicId, tokenRequest: func() string { - tokenPublicId, err := authtoken.NewAuthTokenId(ctx) + tokenPublicId, err := authtoken.NewAuthTokenId() require.NoError(t, err) reqTk := request.Token{ RequestId: tokenPublicId, @@ -304,7 +304,7 @@ func Test_TokenRequest(t *testing.T) { atRepoFn: atRepoFn, authMethodId: "not-a-match", tokenRequest: func() string { - tokenPublicId, err := authtoken.NewAuthTokenId(ctx) + tokenPublicId, err := authtoken.NewAuthTokenId() require.NoError(t, err) TestPendingToken(t, testAtRepo, testUser, testAcct, tokenPublicId) return TestTokenRequestId(t, testAuthMethod, kmsCache, 200*time.Second, tokenPublicId) @@ -318,7 +318,7 @@ func Test_TokenRequest(t *testing.T) { atRepoFn: atRepoFn, authMethodId: testAuthMethod.PublicId, tokenRequest: func() string { - tokenPublicId, err := authtoken.NewAuthTokenId(ctx) + tokenPublicId, err := authtoken.NewAuthTokenId() require.NoError(t, err) TestPendingToken(t, testAtRepo, testUser, testAcct, tokenPublicId) return TestTokenRequestId(t, testAuthMethod, kmsCache, 200*time.Second, tokenPublicId) diff --git a/internal/auth/password/account.go b/internal/auth/password/account.go index 168a3acffa4..3eb2012065c 100644 --- a/internal/auth/password/account.go +++ b/internal/auth/password/account.go @@ -4,8 +4,6 @@ package password import ( - "context" - "github.com/hashicorp/boundary/internal/auth/password/store" "github.com/hashicorp/boundary/internal/errors" "github.com/hashicorp/boundary/internal/oplog" @@ -30,12 +28,12 @@ func allocAccount() *Account { // NewAccount creates a new in memory Account. LoginName, name, and // description are the only valid options. All other options are ignored. -func NewAccount(ctx context.Context, authMethodId string, opt ...Option) (*Account, error) { +func NewAccount(authMethodId string, opt ...Option) (*Account, error) { const op = "password.NewAccount" // NOTE(mgaffney): The scopeId in the embedded *store.Account is // populated by a trigger in the database. if authMethodId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing auth method id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing auth method id") } opts := GetOpts(opt...) diff --git a/internal/auth/password/account_test.go b/internal/auth/password/account_test.go index 94df7ad370f..e9970c3b9a2 100644 --- a/internal/auth/password/account_test.go +++ b/internal/auth/password/account_test.go @@ -4,7 +4,6 @@ package password import ( - "context" "testing" "github.com/hashicorp/boundary/internal/auth/password/store" @@ -102,7 +101,7 @@ func TestAccount_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewAccount(context.Background(), tt.args.authMethodId, tt.args.opts...) + got, err := NewAccount(tt.args.authMethodId, tt.args.opts...) 
if tt.wantErr { assert.Error(err) require.Nil(got) diff --git a/internal/auth/password/argon2.go b/internal/auth/password/argon2.go index 210ce6787c3..770c1483318 100644 --- a/internal/auth/password/argon2.go +++ b/internal/auth/password/argon2.go @@ -45,28 +45,28 @@ func NewArgon2Configuration() *Argon2Configuration { } } -func (c *Argon2Configuration) validate(ctx context.Context) error { +func (c *Argon2Configuration) validate() error { const op = "password.(Argon2Configuration).validate" if c == nil { - return errors.New(ctx, errors.PasswordInvalidConfiguration, op, "missing config") + return errors.NewDeprecated(errors.PasswordInvalidConfiguration, op, "missing config") } if c.Argon2Configuration == nil { - return errors.New(ctx, errors.PasswordInvalidConfiguration, op, "missing embedded config") + return errors.NewDeprecated(errors.PasswordInvalidConfiguration, op, "missing embedded config") } if c.Iterations == 0 { - return errors.New(ctx, errors.PasswordInvalidConfiguration, op, "missing iterations") + return errors.NewDeprecated(errors.PasswordInvalidConfiguration, op, "missing iterations") } if c.Memory == 0 { - return errors.New(ctx, errors.PasswordInvalidConfiguration, op, "missing memory") + return errors.NewDeprecated(errors.PasswordInvalidConfiguration, op, "missing memory") } if c.Threads == 0 { - return errors.New(ctx, errors.PasswordInvalidConfiguration, op, "missing threads") + return errors.NewDeprecated(errors.PasswordInvalidConfiguration, op, "missing threads") } if c.SaltLength == 0 { - return errors.New(ctx, errors.PasswordInvalidConfiguration, op, "missing salt length") + return errors.NewDeprecated(errors.PasswordInvalidConfiguration, op, "missing salt length") } if c.KeyLength == 0 { - return errors.New(ctx, errors.PasswordInvalidConfiguration, op, "missing key length") + return errors.NewDeprecated(errors.PasswordInvalidConfiguration, op, "missing key length") } return nil } @@ -132,21 +132,21 @@ type Argon2Credential struct { tableName string } -func newArgon2Credential(ctx context.Context, accountId string, password string, conf *Argon2Configuration) (*Argon2Credential, error) { +func newArgon2Credential(accountId string, password string, conf *Argon2Configuration) (*Argon2Credential, error) { const op = "password.newArgon2Credential" if accountId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing accountId") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing accountId") } if password == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing password") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing password") } if conf == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing argon2 configuration") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing argon2 configuration") } - id, err := newArgon2CredentialId(ctx) + id, err := newArgon2CredentialId() if err != nil { - return nil, errors.Wrap(ctx, err, op) + return nil, errors.WrapDeprecated(err, op) } c := &Argon2Credential{ @@ -160,7 +160,7 @@ func newArgon2Credential(ctx context.Context, accountId string, password string, salt := make([]byte, conf.SaltLength) if _, err := rand.Read(salt); err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithCode(errors.Io)) + return nil, errors.WrapDeprecated(err, op, errors.WithCode(errors.Io)) } c.Salt = salt c.DerivedKey = argon2.IDKey([]byte(password), c.Salt, conf.Iterations, conf.Memory, uint8(conf.Threads), conf.KeyLength) @@ 
-190,7 +190,7 @@ func (c *Argon2Credential) SetTableName(n string) { func (c *Argon2Credential) encrypt(ctx context.Context, cipher wrapping.Wrapper) error { const op = "password.(Argon2Credential).encrypt" if err := structwrapping.WrapStruct(ctx, cipher, c.Argon2Credential, nil); err != nil { - return errors.Wrap(ctx, err, op, errors.WithCode(errors.Encrypt)) + return errors.WrapDeprecated(err, op, errors.WithCode(errors.Encrypt)) } keyId, err := cipher.KeyId(ctx) if err != nil { @@ -203,7 +203,7 @@ func (c *Argon2Credential) encrypt(ctx context.Context, cipher wrapping.Wrapper) func (c *Argon2Credential) decrypt(ctx context.Context, cipher wrapping.Wrapper) error { const op = "password.(Argon2Credential).decrypt" if err := structwrapping.UnwrapStruct(ctx, cipher, c.Argon2Credential, nil); err != nil { - return errors.Wrap(ctx, err, op, errors.WithCode(errors.Decrypt)) + return errors.WrapDeprecated(err, op, errors.WithCode(errors.Decrypt)) } return nil } diff --git a/internal/auth/password/argon2_test.go b/internal/auth/password/argon2_test.go index 9a26b59349d..a3289544033 100644 --- a/internal/auth/password/argon2_test.go +++ b/internal/auth/password/argon2_test.go @@ -53,7 +53,7 @@ func TestArgon2Configuration_New(t *testing.T) { got := NewArgon2Configuration() require.NotNil(got) var err error - got.PrivateId, err = newArgon2ConfigurationId(context.Background()) + got.PrivateId, err = newArgon2ConfigurationId() require.NoError(err) got.PasswordMethodId = authMethodId err = rw.Create(ctx, got) @@ -69,7 +69,7 @@ func TestArgon2Configuration_New(t *testing.T) { c1 := NewArgon2Configuration() require.NotNil(c1) - c1.PrivateId, err = newArgon2ConfigurationId(context.Background()) + c1.PrivateId, err = newArgon2ConfigurationId() require.NoError(err) c1.PasswordMethodId = authMethodId c1.Iterations = c1.Iterations + 1 @@ -79,7 +79,7 @@ func TestArgon2Configuration_New(t *testing.T) { c2 := NewArgon2Configuration() require.NotNil(c2) - c2.PrivateId, err = newArgon2ConfigurationId(context.Background()) + c2.PrivateId, err = newArgon2ConfigurationId() require.NoError(err) c2.PasswordMethodId = authMethodId c2.Memory = 32 * 1024 @@ -280,7 +280,7 @@ func TestArgon2Configuration_Validate(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got := tt.in.validate(context.Background()) + got := tt.in.validate() if tt.wantErr { require.Error(got) assert.Truef(errors.Match(errors.T(tt.wantErrIs), got), "want err code: %q got err: %q", tt.wantErrIs, got) @@ -294,7 +294,6 @@ func TestArgon2Configuration_Validate(t *testing.T) { func testArgon2Confs(t *testing.T, conn *db.DB, authMethodId string, count int) []*Argon2Configuration { t.Helper() - ctx := context.Background() assert, require := assert.New(t), require.New(t) rw := db.New(conn) var confs []*Argon2Configuration @@ -306,12 +305,12 @@ func testArgon2Confs(t *testing.T, conn *db.DB, authMethodId string, count int) conf := NewArgon2Configuration() require.NotNil(conf) conf.PasswordMethodId = authMethodId - conf.PrivateId, err = newArgon2ConfigurationId(ctx) + conf.PrivateId, err = newArgon2ConfigurationId() require.NoError(err) conf.Iterations = base.Iterations + uint32(i+1) conf.Threads = base.Threads + uint32(i+1) - err = rw.Create(ctx, conf) + err = rw.Create(context.Background(), conf) require.NoError(err) confs = append(confs, conf) } @@ -426,7 +425,7 @@ func TestArgon2Credential_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), 
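// Editor's sketch (illustrative, not part of the patch): newArgon2Credential
// above derives the stored key with golang.org/x/crypto/argon2 over a random
// salt. A standalone equivalent with hypothetical parameters (the real values
// come from the account's Argon2Configuration):
package main

import (
	"crypto/rand"
	"fmt"

	"golang.org/x/crypto/argon2"
)

func main() {
	salt := make([]byte, 32) // SaltLength is configuration-driven in the patch
	if _, err := rand.Read(salt); err != nil {
		panic(err)
	}
	// iterations, memory (KiB), threads and key length are hypothetical here
	key := argon2.IDKey([]byte("12345678"), salt, 3, 64*1024, 1, 32)
	fmt.Printf("derived %d-byte key\n", len(key))
}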
require.New(t) - got, err := newArgon2Credential(context.Background(), tt.args.accountId, tt.args.password, tt.args.conf) + got, err := newArgon2Credential(tt.args.accountId, tt.args.password, tt.args.conf) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) diff --git a/internal/auth/password/authmethod.go b/internal/auth/password/authmethod.go index d5bc8ff3b1c..1523e53a4a7 100644 --- a/internal/auth/password/authmethod.go +++ b/internal/auth/password/authmethod.go @@ -4,8 +4,6 @@ package password import ( - "context" - "github.com/hashicorp/boundary/internal/auth/password/store" "github.com/hashicorp/boundary/internal/errors" "github.com/hashicorp/boundary/internal/oplog" @@ -29,10 +27,10 @@ func allocAuthMethod() AuthMethod { // Name and description are the only valid options. All other options are // ignored. MinLoginNameLength and MinPasswordLength are pre-set to the // default values of 5 and 8 respectively. -func NewAuthMethod(ctx context.Context, scopeId string, opt ...Option) (*AuthMethod, error) { +func NewAuthMethod(scopeId string, opt ...Option) (*AuthMethod, error) { const op = "password.NewAuthMethod" if scopeId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing scope id") } opts := GetOpts(opt...) diff --git a/internal/auth/password/authmethod_test.go b/internal/auth/password/authmethod_test.go index e0e5fa44880..347b40f8172 100644 --- a/internal/auth/password/authmethod_test.go +++ b/internal/auth/password/authmethod_test.go @@ -15,7 +15,6 @@ import ( ) func TestAuthMethod_New(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -78,7 +77,7 @@ func TestAuthMethod_New(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) org, _ := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) - got, err := NewAuthMethod(ctx, org.GetPublicId(), tt.args.opts...) + got, err := NewAuthMethod(org.GetPublicId(), tt.args.opts...) 
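// Editor's sketch (not part of the patch): with the context parameter gone,
// constructing and persisting a password auth method follows the shape used
// in the UpdateAuthMethod test further below (org, repo and ctx come from the
// surrounding test setup):
am, err := NewAuthMethod(org.GetPublicId(), WithName("default"), WithDescription("default"))
if err != nil {
	return err
}
// CreateAuthMethod keeps its context and assigns the public id and the
// initial Argon2 configuration id server-side, as the
// repository_authmethod.go hunk later in this patch shows.
created, err := repo.CreateAuthMethod(ctx, am)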
if tt.wantErr { assert.Error(err) require.Nil(got) @@ -92,7 +91,7 @@ func TestAuthMethod_New(t *testing.T) { assert.Emptyf(got.PublicId, "PublicId set") assert.Equal(tt.want, got) - id, err := newAuthMethodId(ctx) + id, err := newAuthMethodId() assert.NoError(err) tt.want.PublicId = id @@ -100,11 +99,12 @@ func TestAuthMethod_New(t *testing.T) { conf := NewArgon2Configuration() require.NotNil(conf) - conf.PrivateId, err = newArgon2ConfigurationId(context.Background()) + conf.PrivateId, err = newArgon2ConfigurationId() require.NoError(err) conf.PasswordMethodId = got.PublicId got.PasswordConfId = conf.PrivateId + ctx := context.Background() _, err2 := w.DoTx(ctx, db.StdRetryCnt, db.ExpBackoff{}, func(_ db.Reader, iw db.Writer) error { require.NoError(iw.Create(ctx, conf)) @@ -117,7 +117,7 @@ func TestAuthMethod_New(t *testing.T) { t.Run("blank-scopeId", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewAuthMethod(context.Background(), "") + got, err := NewAuthMethod("") assert.Error(err) require.Nil(got) }) diff --git a/internal/auth/password/private_ids.go b/internal/auth/password/private_ids.go index 65eed39c44c..6d171fb91aa 100644 --- a/internal/auth/password/private_ids.go +++ b/internal/auth/password/private_ids.go @@ -4,8 +4,6 @@ package password import ( - "context" - "github.com/hashicorp/boundary/internal/db" "github.com/hashicorp/boundary/internal/errors" ) @@ -16,20 +14,20 @@ const ( argon2CredentialPrefix = "arg2cred" ) -func newArgon2ConfigurationId(ctx context.Context) (string, error) { +func newArgon2ConfigurationId() (string, error) { const op = "password.newArgon2ConfigurationId" - id, err := db.NewPrivateId(ctx, argon2ConfigurationPrefix) + id, err := db.NewPrivateId(argon2ConfigurationPrefix) if err != nil { - return "", errors.Wrap(ctx, err, op) + return "", errors.WrapDeprecated(err, op) } return id, nil } -func newArgon2CredentialId(ctx context.Context) (string, error) { +func newArgon2CredentialId() (string, error) { const op = "password.newArgon2CredentialId" - id, err := db.NewPrivateId(ctx, argon2CredentialPrefix) + id, err := db.NewPrivateId(argon2CredentialPrefix) if err != nil { - return "", errors.Wrap(ctx, err, op) + return "", errors.WrapDeprecated(err, op) } return id, nil } diff --git a/internal/auth/password/private_ids_test.go b/internal/auth/password/private_ids_test.go index 03d13d18de9..1beba520362 100644 --- a/internal/auth/password/private_ids_test.go +++ b/internal/auth/password/private_ids_test.go @@ -4,7 +4,6 @@ package password import ( - "context" "strings" "testing" @@ -13,14 +12,13 @@ import ( ) func Test_PrivateIds(t *testing.T) { - ctx := context.Background() t.Run("argon2Config", func(t *testing.T) { - id, err := newArgon2ConfigurationId(ctx) + id, err := newArgon2ConfigurationId() require.NoError(t, err) assert.True(t, strings.HasPrefix(id, argon2ConfigurationPrefix+"_")) }) t.Run("argon2Cred", func(t *testing.T) { - id, err := newArgon2CredentialId(ctx) + id, err := newArgon2CredentialId() require.NoError(t, err) assert.True(t, strings.HasPrefix(id, argon2CredentialPrefix+"_")) }) diff --git a/internal/auth/password/public_ids.go b/internal/auth/password/public_ids.go index 8f0ab8b3987..3bcac8f9d96 100644 --- a/internal/auth/password/public_ids.go +++ b/internal/auth/password/public_ids.go @@ -4,8 +4,6 @@ package password import ( - "context" - "github.com/hashicorp/boundary/globals" "github.com/hashicorp/boundary/internal/auth" "github.com/hashicorp/boundary/internal/db" @@ -24,20 +22,20 @@ const ( 
Subtype = subtypes.Subtype("password") ) -func newAuthMethodId(ctx context.Context) (string, error) { +func newAuthMethodId() (string, error) { const op = "password.newAuthMethodId" - id, err := db.NewPublicId(ctx, globals.PasswordAuthMethodPrefix) + id, err := db.NewPublicId(globals.PasswordAuthMethodPrefix) if err != nil { - return "", errors.Wrap(ctx, err, op) + return "", errors.WrapDeprecated(err, op) } return id, nil } -func newAccountId(ctx context.Context) (string, error) { +func newAccountId() (string, error) { const op = "password.newAccountId" - id, err := db.NewPublicId(ctx, globals.PasswordAccountPrefix) + id, err := db.NewPublicId(globals.PasswordAccountPrefix) if err != nil { - return "", errors.Wrap(ctx, err, op) + return "", errors.WrapDeprecated(err, op) } return id, nil } diff --git a/internal/auth/password/public_ids_test.go b/internal/auth/password/public_ids_test.go index 0051f7a0902..b4d5135bf06 100644 --- a/internal/auth/password/public_ids_test.go +++ b/internal/auth/password/public_ids_test.go @@ -4,7 +4,6 @@ package password import ( - "context" "strings" "testing" @@ -14,14 +13,13 @@ import ( ) func Test_PublicIds(t *testing.T) { - ctx := context.Background() t.Run("authMethod", func(t *testing.T) { - id, err := newAuthMethodId(ctx) + id, err := newAuthMethodId() require.NoError(t, err) assert.True(t, strings.HasPrefix(id, globals.PasswordAuthMethodPrefix+"_")) }) t.Run("account", func(t *testing.T) { - id, err := newAccountId(ctx) + id, err := newAccountId() require.NoError(t, err) assert.True(t, strings.HasPrefix(id, globals.PasswordAccountPrefix+"_")) }) diff --git a/internal/auth/password/repository.go b/internal/auth/password/repository.go index b11ec3699b1..bf67432bb58 100644 --- a/internal/auth/password/repository.go +++ b/internal/auth/password/repository.go @@ -4,7 +4,6 @@ package password import ( - "context" "strings" "github.com/hashicorp/boundary/internal/db" @@ -26,15 +25,15 @@ type Repository struct { // only be used for one transaction and it is not safe for concurrent go // routines to access it. WithLimit option is used as a repo wide default // limit applied to all ListX methods. -func NewRepository(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { +func NewRepository(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { const op = "password.NewRepository" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing kms") } opts := GetOpts(opt...) 
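// Editor's sketch (not part of the patch): callers now build the password
// repository exactly as the updated tests do; the nil checks above reject a
// missing reader, writer or kms with InvalidParameter. WithLimit is the
// option the doc comment names as the repo-wide default for the ListX
// methods (the value 3 here is illustrative):
repo, err := NewRepository(rw, rw, kms, WithLimit(3))
if err != nil {
	return err
}
accounts, err := repo.ListAccounts(ctx, authMethod.GetPublicId())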
diff --git a/internal/auth/password/repository_account.go b/internal/auth/password/repository_account.go index fea91c54ba8..b53515a0a93 100644 --- a/internal/auth/password/repository_account.go +++ b/internal/auth/password/repository_account.go @@ -69,7 +69,7 @@ func (r *Repository) CreateAccount(ctx context.Context, scopeId string, a *Accou } a.PublicId = opts.withPublicId } else { - id, err := newAccountId(ctx) + id, err := newAccountId() if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -81,7 +81,7 @@ func (r *Repository) CreateAccount(ctx context.Context, scopeId string, a *Accou if cc.MinPasswordLength > len(opts.password) { return nil, errors.New(ctx, errors.PasswordTooShort, op, fmt.Sprintf("must be longer than %v", cc.MinPasswordLength)) } - if cred, err = newArgon2Credential(ctx, a.PublicId, opts.password, cc.argon2()); err != nil { + if cred, err = newArgon2Credential(a.PublicId, opts.password, cc.argon2()); err != nil { return nil, errors.Wrap(ctx, err, op) } } diff --git a/internal/auth/password/repository_account_test.go b/internal/auth/password/repository_account_test.go index 4f67a0ca762..d16c52b4f4b 100644 --- a/internal/auth/password/repository_account_test.go +++ b/internal/auth/password/repository_account_test.go @@ -234,7 +234,7 @@ func TestRepository_CreateAccount(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) got, err := repo.CreateAccount(context.Background(), org.GetPublicId(), tt.in, tt.opts...) @@ -274,7 +274,7 @@ func TestRepository_CreateAccount_DuplicateNames(t *testing.T) { t.Run("invalid-duplicate-names", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -306,7 +306,7 @@ func TestRepository_CreateAccount_DuplicateNames(t *testing.T) { t.Run("valid-duplicate-names-diff-parents", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -344,7 +344,6 @@ func TestRepository_CreateAccount_DuplicateNames(t *testing.T) { } func TestRepository_LookupAccount(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -356,7 +355,7 @@ func TestRepository_LookupAccount(t *testing.T) { authMethod := TestAuthMethods(t, conn, org.GetPublicId(), 1)[0] account := TestAccount(t, conn, authMethod.GetPublicId(), "name1") - newAcctId, err := newAccountId(ctx) + newAcctId, err := newAccountId() require.NoError(t, err) tests := []struct { name string @@ -385,10 +384,10 @@ func TestRepository_LookupAccount(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.LookupAccount(ctx, tt.in) + got, err := repo.LookupAccount(context.Background(), tt.in) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) @@ -401,7 +400,6 @@ func TestRepository_LookupAccount(t *testing.T) { } 
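// Editor's sketch (not part of the patch): put together, the account path in
// this hunk and the tests around it looks roughly like this after the change
// (org, authMethod, repo and ctx come from the surrounding setup):
acct, err := NewAccount(authMethod.GetPublicId(), WithLoginName("name1"))
if err != nil {
	return err
}
// CreateAccount picks a public id via newAccountId() and, when WithPassword
// is supplied, checks MinPasswordLength and hashes the value through
// newArgon2Credential with the method's current configuration.
created, err := repo.CreateAccount(ctx, org.GetPublicId(), acct, WithPassword("12345678"))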
func TestRepository_DeleteAccount(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -413,7 +411,7 @@ func TestRepository_DeleteAccount(t *testing.T) { authMethod := TestAuthMethods(t, conn, org.GetPublicId(), 1)[0] account := TestAccount(t, conn, authMethod.GetPublicId(), "name1") - newAcctId, err := newAccountId(ctx) + newAcctId, err := newAccountId() require.NoError(t, err) tests := []struct { name string @@ -443,10 +441,10 @@ func TestRepository_DeleteAccount(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.DeleteAccount(ctx, org.GetPublicId(), tt.in) + got, err := repo.DeleteAccount(context.Background(), org.GetPublicId(), tt.in) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) @@ -501,7 +499,7 @@ func TestRepository_ListAccounts(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) got, err := repo.ListAccounts(context.Background(), tt.in, tt.opts...) @@ -578,7 +576,7 @@ func TestRepository_ListAccounts_Limits(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(context.Background(), rw, rw, kms, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, tt.repoOpts...) assert.NoError(err) require.NotNil(repo) got, err := repo.ListAccounts(context.Background(), am.GetPublicId(), tt.listOpts...) 
@@ -917,7 +915,7 @@ func TestRepository_UpdateAccount(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -976,7 +974,6 @@ func TestRepository_UpdateAccount(t *testing.T) { } func TestRepository_UpdateAccount_DupeNames(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -985,7 +982,7 @@ func TestRepository_UpdateAccount_DupeNames(t *testing.T) { t.Run("invalid-duplicate-names", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -1017,7 +1014,7 @@ func TestRepository_UpdateAccount_DupeNames(t *testing.T) { t.Run("valid-duplicate-names-diff-AuthMethods", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -1063,7 +1060,7 @@ func TestRepository_UpdateAccount_DupeNames(t *testing.T) { t.Run("invalid-duplicate-loginnames", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -1095,7 +1092,7 @@ func TestRepository_UpdateAccount_DupeNames(t *testing.T) { t.Run("valid-duplicate-loginnames-diff-AuthMethods", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -1137,7 +1134,7 @@ func TestRepository_UpdateAccount_DupeNames(t *testing.T) { t.Run("change-authmethod-id", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) diff --git a/internal/auth/password/repository_authmethod.go b/internal/auth/password/repository_authmethod.go index ec2c60666fd..c86b3838c49 100644 --- a/internal/auth/password/repository_authmethod.go +++ b/internal/auth/password/repository_authmethod.go @@ -50,7 +50,7 @@ func (r *Repository) CreateAuthMethod(ctx context.Context, m *AuthMethod, opt .. } m.PublicId = opts.withPublicId } else { - id, err := newAuthMethodId(ctx) + id, err := newAuthMethodId() if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -61,12 +61,12 @@ func (r *Repository) CreateAuthMethod(ctx context.Context, m *AuthMethod, opt .. 
if !ok { return nil, errors.New(ctx, errors.PasswordUnsupportedConfiguration, op, "unknown configuration") } - if err := c.validate(ctx); err != nil { + if err := c.validate(); err != nil { return nil, errors.Wrap(ctx, err, op) } var err error - c.PrivateId, err = newArgon2ConfigurationId(ctx) + c.PrivateId, err = newArgon2ConfigurationId() if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/auth/password/repository_authmethod_test.go b/internal/auth/password/repository_authmethod_test.go index 963bac590fc..65d97f7a99b 100644 --- a/internal/auth/password/repository_authmethod_test.go +++ b/internal/auth/password/repository_authmethod_test.go @@ -25,7 +25,6 @@ import ( ) func TestRepository_CreateAuthMethod(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -146,10 +145,10 @@ func TestRepository_CreateAuthMethod(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.CreateAuthMethod(ctx, tt.in, tt.opts...) + got, err := repo.CreateAuthMethod(context.Background(), tt.in, tt.opts...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) @@ -168,7 +167,6 @@ func TestRepository_CreateAuthMethod(t *testing.T) { } func TestRepository_CreateAuthMethod_DupeNames(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -177,7 +175,7 @@ func TestRepository_CreateAuthMethod_DupeNames(t *testing.T) { t.Run("invalid-duplicate-names", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -189,7 +187,7 @@ func TestRepository_CreateAuthMethod_DupeNames(t *testing.T) { }, } - got, err := repo.CreateAuthMethod(ctx, in) + got, err := repo.CreateAuthMethod(context.Background(), in) require.NoError(err) require.NotNil(got) assertPublicId(t, globals.PasswordAuthMethodPrefix, got.PublicId) @@ -198,14 +196,14 @@ func TestRepository_CreateAuthMethod_DupeNames(t *testing.T) { assert.Equal(in.Description, got.Description) assert.Equal(got.CreateTime, got.UpdateTime) - got2, err := repo.CreateAuthMethod(ctx, in) + got2, err := repo.CreateAuthMethod(context.Background(), in) assert.Truef(errors.Match(errors.T(errors.NotUnique), err), "Unexpected error %s", err) assert.Nil(got2) }) t.Run("valid-duplicate-names-diff-scopes", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -218,7 +216,7 @@ func TestRepository_CreateAuthMethod_DupeNames(t *testing.T) { in2 := in.Clone() in.ScopeId = org1.GetPublicId() - got, err := repo.CreateAuthMethod(ctx, in) + got, err := repo.CreateAuthMethod(context.Background(), in) require.NoError(err) require.NotNil(got) assertPublicId(t, globals.PasswordAuthMethodPrefix, got.PublicId) @@ -229,7 +227,7 @@ func TestRepository_CreateAuthMethod_DupeNames(t *testing.T) { org2, _ := iam.TestScopes(t, iamRepo) in2.ScopeId = org2.GetPublicId() - got2, err := repo.CreateAuthMethod(ctx, in2) + got2, err := 
repo.CreateAuthMethod(context.Background(), in2) require.NoError(err) require.NotNil(got2) assertPublicId(t, globals.PasswordAuthMethodPrefix, got2.PublicId) @@ -241,7 +239,6 @@ func TestRepository_CreateAuthMethod_DupeNames(t *testing.T) { } func TestRepository_CreateAuthMethod_PublicId(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -250,18 +247,18 @@ func TestRepository_CreateAuthMethod_PublicId(t *testing.T) { t.Run("valid-with-publicid", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) org1, _ := iam.TestScopes(t, iamRepo) in := allocAuthMethod() - amId, err := newAuthMethodId(ctx) + amId, err := newAuthMethodId() require.NoError(err) in.ScopeId = org1.GetPublicId() - got, err := repo.CreateAuthMethod(ctx, &in, WithPublicId(amId)) + got, err := repo.CreateAuthMethod(context.Background(), &in, WithPublicId(amId)) require.NoError(err) require.NotNil(got) assert.Equal(amId, got.GetPublicId()) @@ -270,7 +267,7 @@ func TestRepository_CreateAuthMethod_PublicId(t *testing.T) { t.Run("invalid-with-badpublicid", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -278,7 +275,7 @@ func TestRepository_CreateAuthMethod_PublicId(t *testing.T) { in := allocAuthMethod() in.ScopeId = org1.GetPublicId() - got, err := repo.CreateAuthMethod(ctx, &in, WithPublicId("invalid_idwithabadprefix")) + got, err := repo.CreateAuthMethod(context.Background(), &in, WithPublicId("invalid_idwithabadprefix")) assert.Error(err) assert.Nil(got) assert.Truef(errors.Match(errors.T(errors.InvalidPublicId), err), "Unexpected error %s", err) @@ -286,7 +283,6 @@ func TestRepository_CreateAuthMethod_PublicId(t *testing.T) { } func TestRepository_LookupAuthMethod(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -295,7 +291,7 @@ func TestRepository_LookupAuthMethod(t *testing.T) { o, _ := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) authMethod := TestAuthMethods(t, conn, o.GetPublicId(), 1)[0] - amId, err := newAuthMethodId(ctx) + amId, err := newAuthMethodId() require.NoError(t, err) tests := []struct { name string @@ -324,10 +320,10 @@ func TestRepository_LookupAuthMethod(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.LookupAuthMethod(ctx, tt.in) + got, err := repo.LookupAuthMethod(context.Background(), tt.in) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) @@ -340,7 +336,6 @@ func TestRepository_LookupAuthMethod(t *testing.T) { } func TestRepository_DeleteAuthMethod(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -349,7 +344,7 @@ func TestRepository_DeleteAuthMethod(t *testing.T) { o, _ := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) authMethod := TestAuthMethods(t, conn, o.GetPublicId(), 1)[0] - newAuthMethodId, err := newAuthMethodId(ctx) + 
newAuthMethodId, err := newAuthMethodId() require.NoError(t, err) tests := []struct { name string @@ -379,10 +374,10 @@ func TestRepository_DeleteAuthMethod(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.DeleteAuthMethod(ctx, o.GetPublicId(), tt.in) + got, err := repo.DeleteAuthMethod(context.Background(), o.GetPublicId(), tt.in) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) @@ -395,7 +390,6 @@ func TestRepository_DeleteAuthMethod(t *testing.T) { } func TestRepository_ListAuthMethods(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -434,10 +428,10 @@ func TestRepository_ListAuthMethods(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.ListAuthMethods(ctx, tt.in, tt.opts...) + got, err := repo.ListAuthMethods(context.Background(), tt.in, tt.opts...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) @@ -450,13 +444,12 @@ func TestRepository_ListAuthMethods(t *testing.T) { } func TestRepository_ListAuthMethods_Multiple_Scopes(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) iamRepo := iam.TestRepo(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(t, err) var total int @@ -471,7 +464,7 @@ func TestRepository_ListAuthMethods_Multiple_Scopes(t *testing.T) { iam.TestSetPrimaryAuthMethod(t, iam.TestRepo(t, conn, wrapper), o, ams[0].PublicId) total += numPerScope } - got, err := repo.ListAuthMethods(ctx, scopeIds, WithOrderByCreateTime(true)) + got, err := repo.ListAuthMethods(context.Background(), scopeIds, WithOrderByCreateTime(true)) require.NoError(t, err) assert.Equal(t, total, len(got)) found := map[string]struct{}{} @@ -489,7 +482,6 @@ func TestRepository_ListAuthMethods_Multiple_Scopes(t *testing.T) { } func TestRepository_ListAuthMethods_Limits(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -551,10 +543,10 @@ func TestRepository_ListAuthMethods_Limits(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, tt.repoOpts...) assert.NoError(err) require.NotNil(repo) - got, err := repo.ListAuthMethods(ctx, []string{ams[0].GetScopeId()}, tt.listOpts...) + got, err := repo.ListAuthMethods(context.Background(), []string{ams[0].GetScopeId()}, tt.listOpts...) 
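// Editor's sketch (not part of the patch): listing is scope-based, as the
// multiple-scopes test above exercises. The post-patch call shape, with
// scopeIds collected the way that test builds them:
got, err := repo.ListAuthMethods(ctx, scopeIds, WithOrderByCreateTime(true))
if err != nil {
	return err
}
// The test asserts that len(got) equals the total number of auth methods
// created across all of the requested scopes.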
require.NoError(err) assert.Len(got, tt.wantLen) if tt.wantLen > 0 { @@ -566,15 +558,16 @@ func TestRepository_ListAuthMethods_Limits(t *testing.T) { func TestRepository_UpdateAuthMethod(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(t, err) iamRepo := iam.TestRepo(t, conn, wrapper) + ctx := context.Background() + type args struct { updates *store.AuthMethod fieldMaskPaths []string @@ -695,7 +688,7 @@ func TestRepository_UpdateAuthMethod(t *testing.T) { args: args{ updates: &store.AuthMethod{ PublicId: func() string { - s, err := newAuthMethodId(ctx) + s, err := newAuthMethodId() require.NoError(t, err) return s }(), @@ -748,13 +741,13 @@ func TestRepository_UpdateAuthMethod(t *testing.T) { // create the initial auth method o, _ := iam.TestScopes(t, iamRepo) - am, err := NewAuthMethod(ctx, o.GetPublicId(), WithName("default"), WithDescription("default")) + am, err := NewAuthMethod(o.GetPublicId(), WithName("default"), WithDescription("default")) require.NoError(err) origAM, err := repo.CreateAuthMethod(ctx, am) require.NoError(err) assert.EqualValues(1, origAM.Version) - amToUpdate, err := NewAuthMethod(ctx, o.GetPublicId()) + amToUpdate, err := NewAuthMethod(o.GetPublicId()) require.NoError(err) amToUpdate.PublicId = origAM.GetPublicId() amToUpdate.Version = origAM.Version diff --git a/internal/auth/password/repository_configuration.go b/internal/auth/password/repository_configuration.go index 523be157007..8e7e39dca3c 100644 --- a/internal/auth/password/repository_configuration.go +++ b/internal/auth/password/repository_configuration.go @@ -19,7 +19,7 @@ import ( // the only configuration type. type Configuration interface { AuthMethodId() string - validate(context.Context) error + validate() error } // GetConfiguration returns the current configuration for authMethodId. 
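// Editor's sketch (not part of the patch): after this hunk Configuration is a
// two-method interface whose validate no longer takes a context. The tconf
// test double further down is the minimal implementation; the same shape as a
// sketch (a real implementation, like Argon2Configuration, rejects zero
// values in validate):
type exampleConf struct{ authMethodId string }

func (c exampleConf) AuthMethodId() string { return c.authMethodId }
func (c exampleConf) validate() error      { return nil }

var _ Configuration = exampleConf{}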
@@ -53,7 +53,7 @@ func (r *Repository) SetConfiguration(ctx context.Context, scopeId string, c Con if c.AuthMethodId() == "" { return nil, errors.New(ctx, errors.InvalidParameter, op, "missing auth method id") } - if err := c.validate(ctx); err != nil { + if err := c.validate(); err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -73,7 +73,7 @@ func (r *Repository) setArgon2Conf(ctx context.Context, scopeId string, c *Argon const op = "password.(Repository).setArgon2Conf" c = c.clone() - id, err := newArgon2ConfigurationId(ctx) + id, err := newArgon2ConfigurationId() if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/auth/password/repository_configuration_test.go b/internal/auth/password/repository_configuration_test.go index 9895dfa2830..264e73d8944 100644 --- a/internal/auth/password/repository_configuration_test.go +++ b/internal/auth/password/repository_configuration_test.go @@ -23,7 +23,7 @@ func TestRepository_GetSetConfiguration(t *testing.T) { rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(t, err) require.NotNil(t, repo) @@ -129,7 +129,7 @@ func TestRepository_GetConfiguration(t *testing.T) { rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(t, err) require.NotNil(t, repo) @@ -191,8 +191,8 @@ func TestRepository_GetConfiguration(t *testing.T) { type tconf int -func (t tconf) AuthMethodId() string { return "abcdefghijk" } -func (t tconf) validate(context.Context) error { return nil } +func (t tconf) AuthMethodId() string { return "abcdefghijk" } +func (t tconf) validate() error { return nil } var _ Configuration = tconf(0) @@ -201,7 +201,7 @@ func TestRepository_SetConfiguration(t *testing.T) { rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(t, err) require.NotNil(t, repo) diff --git a/internal/auth/password/repository_password.go b/internal/auth/password/repository_password.go index 4d171ed1fa9..97da2372e4d 100644 --- a/internal/auth/password/repository_password.go +++ b/internal/auth/password/repository_password.go @@ -68,7 +68,7 @@ func (r *Repository) Authenticate(ctx context.Context, scopeId, authMethodId, lo if err != nil { return acct.Account, errors.Wrap(ctx, err, op, errors.WithMsg("retrieve current password configuration")) } - cred, err := newArgon2Credential(ctx, acct.PublicId, password, cc.argon2()) + cred, err := newArgon2Credential(acct.PublicId, password, cc.argon2()) if err != nil { return acct.Account, errors.Wrap(ctx, err, op, errors.WithCode(errors.PasswordInvalidConfiguration)) } @@ -166,7 +166,7 @@ func (r *Repository) ChangePassword(ctx context.Context, scopeId, accountId, old if cc.MinPasswordLength > len(new) { return nil, errors.New(ctx, errors.PasswordTooShort, op, fmt.Sprintf("must be at least %d", cc.MinPasswordLength)) } - newCred, err := newArgon2Credential(ctx, accountId, new, cc.argon2()) + newCred, err := newArgon2Credential(accountId, new, cc.argon2()) if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -294,7 +294,7 @@ func (r *Repository) SetPassword(ctx context.Context, scopeId, accountId, passwo if cc.MinPasswordLength > len(password) { return 
nil, errors.New(ctx, errors.PasswordTooShort, op, fmt.Sprintf("password must be at least %v", cc.MinPasswordLength)) } - newCred, err = newArgon2Credential(ctx, accountId, password, cc.argon2()) + newCred, err = newArgon2Credential(accountId, password, cc.argon2()) if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/auth/password/repository_password_test.go b/internal/auth/password/repository_password_test.go index 05dd57f80c9..481800e799f 100644 --- a/internal/auth/password/repository_password_test.go +++ b/internal/auth/password/repository_password_test.go @@ -35,7 +35,7 @@ func TestRepository_Authenticate(t *testing.T) { } passwd := "12345678" - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(t, err) require.NotNil(t, repo) outAcct, err := repo.CreateAccount(context.Background(), o.GetPublicId(), inAcct, WithPassword(passwd)) @@ -145,7 +145,7 @@ func TestRepository_AuthenticateRehash(t *testing.T) { passwd := "12345678" ctx := context.Background() - repo, err := NewRepository(context.Background(), rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) assert.NoError(err) require.NotNil(repo) @@ -261,7 +261,7 @@ func TestRepository_ChangePassword(t *testing.T) { wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(t, err) require.NotNil(t, repo) @@ -424,7 +424,7 @@ func TestRepository_SetPassword(t *testing.T) { wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(context.Background(), rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(t, err) require.NotNil(t, repo) diff --git a/internal/auth/password/repository_test.go b/internal/auth/password/repository_test.go index 2696b8ae8f4..7be6b7a61e5 100644 --- a/internal/auth/password/repository_test.go +++ b/internal/auth/password/repository_test.go @@ -4,7 +4,6 @@ package password import ( - "context" "testing" "github.com/hashicorp/boundary/internal/db" @@ -113,7 +112,7 @@ func TestRepository_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewRepository(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.args.opts...) + got, err := NewRepository(tt.args.r, tt.args.w, tt.args.kms, tt.args.opts...) 
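The password repository hunks above remove the context argument from the constructor. As a point of reference only, here is a minimal sketch (not part of the patch) of what a caller looks like after this change; it assumes it sits in the password package alongside the tests above and reuses the same test helpers. The helper name is hypothetical.

package password

import (
	"testing"

	"github.com/hashicorp/boundary/internal/db"
	"github.com/hashicorp/boundary/internal/kms"
)

// testNewRepositorySketch is illustrative only, not part of the patch.
func testNewRepositorySketch(t *testing.T) {
	conn, _ := db.TestSetup(t, "postgres")
	rw := db.New(conn)
	wrapper := db.TestWrapper(t)
	kmsCache := kms.TestKms(t, conn, wrapper)

	// After this change the constructor takes no context; per-call methods
	// such as CreateAccount and Authenticate still do.
	repo, err := NewRepository(rw, rw, kmsCache)
	if err != nil {
		t.Fatal(err)
	}
	_ = repo
}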
if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) diff --git a/internal/auth/password/rewrapping_test.go b/internal/auth/password/rewrapping_test.go index f731c892712..9f216afa3a5 100644 --- a/internal/auth/password/rewrapping_test.go +++ b/internal/auth/password/rewrapping_test.go @@ -52,11 +52,11 @@ func TestRewrap_argon2ConfigRewrapFn(t *testing.T) { wrapper, _ = kmsCache.GetWrapper(context.Background(), org.GetPublicId(), 1) // actually store it - cred, err := newArgon2Credential(ctx, acct.PublicId, "this is a password", conf) + cred, err := newArgon2Credential(acct.PublicId, "this is a password", conf) require.NoError(t, err) - require.NoError(t, cred.encrypt(ctx, wrapper)) - assert.NoError(t, rw.Create(ctx, cred)) + require.NoError(t, cred.encrypt(context.Background(), wrapper)) + assert.NoError(t, rw.Create(context.Background(), cred)) // now things are stored in the db, we can rotate and rewrap assert.NoError(t, kmsCache.RotateKeys(ctx, org.Scope.GetPublicId())) diff --git a/internal/auth/password/testing.go b/internal/auth/password/testing.go index a86432d410a..e302a68b0a0 100644 --- a/internal/auth/password/testing.go +++ b/internal/auth/password/testing.go @@ -18,24 +18,24 @@ import ( // auth methods, the test will fail. func TestAuthMethod(t testing.TB, conn *db.DB, scopeId string, opt ...Option) *AuthMethod { t.Helper() - ctx := context.Background() assert, require := assert.New(t), require.New(t) w := db.New(conn) - cat, err := NewAuthMethod(ctx, scopeId, opt...) + cat, err := NewAuthMethod(scopeId, opt...) assert.NoError(err) require.NotNil(cat) - id, err := newAuthMethodId(ctx) + id, err := newAuthMethodId() assert.NoError(err) require.NotEmpty(id) cat.PublicId = id conf := NewArgon2Configuration() require.NotNil(conf) - conf.PrivateId, err = newArgon2ConfigurationId(ctx) + conf.PrivateId, err = newArgon2ConfigurationId() require.NoError(err) conf.PasswordMethodId = cat.PublicId cat.PasswordConfId = conf.PrivateId + ctx := context.Background() _, err2 := w.DoTx(ctx, db.StdRetryCnt, db.ExpBackoff{}, func(_ db.Reader, iw db.Writer) error { require.NoError(iw.Create(ctx, conf)) @@ -77,19 +77,19 @@ func TestMultipleAccounts(t testing.TB, conn *db.DB, authMethodId string, count // If any errors are encountered during the creation of the account, the test will fail. func TestAccount(t testing.TB, conn *db.DB, authMethodId, loginName string, opt ...Option) *Account { t.Helper() - ctx := context.Background() assert, require := assert.New(t), require.New(t) require.NotEmpty(loginName) w := db.New(conn) opt = append(opt, WithLoginName(loginName)) - cat, err := NewAccount(context.Background(), authMethodId, opt...) + cat, err := NewAccount(authMethodId, opt...) 
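The rewrapping test above shows the split these hunks leave in place: the credential constructor no longer takes a context, while encryption and database writes still do. A minimal sketch of that pattern, assuming it sits in the password package; the helper name is hypothetical and the go-kms-wrapping import alias follows the one used elsewhere in the repository.

package password

import (
	"context"

	"github.com/hashicorp/boundary/internal/db"
	wrapping "github.com/hashicorp/go-kms-wrapping/v2"
)

// storeCredentialSketch is illustrative only; it mirrors the steps taken in
// TestRewrap_argon2ConfigRewrapFn after this change.
func storeCredentialSketch(ctx context.Context, rw db.Writer, wrapper wrapping.Wrapper, acct *Account, conf *Argon2Configuration) error {
	// constructor: no ctx after this change
	cred, err := newArgon2Credential(acct.PublicId, "this is a password", conf)
	if err != nil {
		return err
	}
	// encryption and persistence still take a ctx
	if err := cred.encrypt(ctx, wrapper); err != nil {
		return err
	}
	return rw.Create(ctx, cred)
}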
assert.NoError(err) require.NotNil(cat) - id, err := newAccountId(ctx) + id, err := newAccountId() assert.NoError(err) require.NotEmpty(id) cat.PublicId = id + ctx := context.Background() _, err2 := w.DoTx(ctx, db.StdRetryCnt, db.ExpBackoff{}, func(_ db.Reader, iw db.Writer) error { return iw.Create(ctx, cat) diff --git a/internal/authtoken/authtoken.go b/internal/authtoken/authtoken.go index a7b0e2f7841..2e032ef7eb5 100644 --- a/internal/authtoken/authtoken.go +++ b/internal/authtoken/authtoken.go @@ -77,7 +77,7 @@ func (at *AuthToken) encrypt(ctx context.Context, cipher wrapping.Wrapper) error const op = "authtoken.(writableAuthToken).encrypt" // structwrapping doesn't support embedding, so we'll pass in the store.Entry directly if err := structwrapping.WrapStruct(ctx, cipher, at.AuthToken, nil); err != nil { - return errors.Wrap(ctx, err, op, errors.WithCode(errors.Encrypt)) + return errors.WrapDeprecated(err, op, errors.WithCode(errors.Encrypt)) } keyId, err := cipher.KeyId(ctx) if err != nil { @@ -92,7 +92,7 @@ func (at *AuthToken) decrypt(ctx context.Context, cipher wrapping.Wrapper) error const op = "authtoken.(AuthToken).decrypt" // structwrapping doesn't support embedding, so we'll pass in the store.Entry directly if err := structwrapping.UnwrapStruct(ctx, cipher, at.AuthToken, nil); err != nil { - return errors.Wrap(ctx, err, op, errors.WithCode(errors.Decrypt)) + return errors.WrapDeprecated(err, op, errors.WithCode(errors.Decrypt)) } return nil } @@ -104,22 +104,22 @@ const ( ) // NewAuthTokenId creates a new id for an auth token. -func NewAuthTokenId(ctx context.Context) (string, error) { +func NewAuthTokenId() (string, error) { const op = "authtoken.newAuthTokenId" - id, err := db.NewPublicId(ctx, globals.AuthTokenPrefix) + id, err := db.NewPublicId(globals.AuthTokenPrefix) if err != nil { - return "", errors.Wrap(ctx, err, op) + return "", errors.WrapDeprecated(err, op) } return id, nil } // newAuthToken generates a new in-memory token. The WithStatus option is // support and all other options are ignored. -func newAuthToken(ctx context.Context, opt ...Option) (*AuthToken, error) { +func newAuthToken(opt ...Option) (*AuthToken, error) { const op = "authtoken.newAuthToken" token, err := base62.Random(tokenLength) if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithCode(errors.Io)) + return nil, errors.WrapDeprecated(err, op, errors.WithCode(errors.Io)) } opts := getOpts(opt...) 
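The authtoken hunks above swap the context-aware error constructors for their *Deprecated counterparts wherever the context parameter is removed. As an illustration only (the function and op names below are hypothetical), the two forms sit side by side like this:

package authtoken

import (
	"context"

	"github.com/hashicorp/boundary/internal/errors"
)

// wrapSketch contrasts the two wrapping styles used in this package after the change.
func wrapSketch(ctx context.Context, err error) error {
	const op = "authtoken.wrapSketch" // hypothetical op name
	if ctx != nil {
		// context available: the ctx-aware form stays in use elsewhere
		return errors.Wrap(ctx, err, op, errors.WithCode(errors.Io))
	}
	// no context available, e.g. inside newAuthToken after this change
	return errors.WrapDeprecated(err, op, errors.WithCode(errors.Io))
}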
@@ -145,22 +145,22 @@ func EncryptToken(ctx context.Context, kmsCache *kms.Kms, scopeId, publicId, tok marshaledS1Info, err := proto.Marshal(s1Info) if err != nil { - return "", errors.Wrap(ctx, err, op, errors.WithMsg("marshaling encrypted token"), errors.WithCode(errors.Encode)) + return "", errors.WrapDeprecated(err, op, errors.WithMsg("marshaling encrypted token"), errors.WithCode(errors.Encode)) } tokenWrapper, err := kmsCache.GetWrapper(ctx, scopeId, kms.KeyPurposeTokens) if err != nil { - return "", errors.Wrap(ctx, err, op, errors.WithMsg("unable to get wrapper")) + return "", errors.WrapDeprecated(err, op, errors.WithMsg("unable to get wrapper")) } blobInfo, err := tokenWrapper.Encrypt(ctx, []byte(marshaledS1Info), wrapping.WithAad([]byte(publicId))) if err != nil { - return "", errors.Wrap(ctx, err, op, errors.WithMsg("marshaling token info"), errors.WithCode(errors.Encrypt)) + return "", errors.WrapDeprecated(err, op, errors.WithMsg("marshaling token info"), errors.WithCode(errors.Encrypt)) } marshaledBlob, err := proto.Marshal(blobInfo) if err != nil { - return "", errors.Wrap(ctx, err, op, errors.WithMsg("marshaling encrypted token"), errors.WithCode(errors.Encode)) + return "", errors.WrapDeprecated(err, op, errors.WithMsg("marshaling encrypted token"), errors.WithCode(errors.Encode)) } encoded := base58.FastBase58Encoding(marshaledBlob) diff --git a/internal/authtoken/authtoken_test.go b/internal/authtoken/authtoken_test.go index 77d80ecfbbc..acf132c5896 100644 --- a/internal/authtoken/authtoken_test.go +++ b/internal/authtoken/authtoken_test.go @@ -25,7 +25,6 @@ import ( // placed in repository_test.go func TestAuthToken_DbUpdate(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -35,7 +34,7 @@ func TestAuthToken_DbUpdate(t *testing.T) { am := password.TestAuthMethods(t, conn, org.GetPublicId(), 1)[0] acct := password.TestAccount(t, conn, am.GetPublicId(), "name1") - newAuthTokId, err := NewAuthTokenId(ctx) + newAuthTokId, err := NewAuthTokenId() require.NoError(t, err) type args struct { @@ -113,9 +112,9 @@ func TestAuthToken_DbUpdate(t *testing.T) { authTok := TestAuthToken(t, conn, kms, org.GetPublicId()) proto.Merge(authTok.AuthToken, tt.args.authTok) - err := authTok.encrypt(ctx, wrapper) + err := authTok.encrypt(context.Background(), wrapper) require.NoError(t, err) - cnt, err := w.Update(ctx, authTok, tt.args.fieldMask, tt.args.nullMask) + cnt, err := w.Update(context.Background(), authTok, tt.args.fieldMask, tt.args.nullMask) if tt.wantErr { t.Logf("Got error :%v", err) assert.Error(err) @@ -128,7 +127,6 @@ func TestAuthToken_DbUpdate(t *testing.T) { } func TestAuthToken_DbCreate(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rootWrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, rootWrapper) @@ -140,7 +138,7 @@ func TestAuthToken_DbCreate(t *testing.T) { createdAuthToken := TestAuthToken(t, conn, kms, org.GetPublicId()) testAuthTokenId := func() string { - id, err := NewAuthTokenId(ctx) + id, err := NewAuthTokenId() require.NoError(t, err) return id } @@ -187,10 +185,9 @@ func TestAuthToken_DbCreate(t *testing.T) { } func TestAuthToken_DbDelete(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") testAuthTokenId := func() string { - id, err := NewAuthTokenId(ctx) + id, err := NewAuthTokenId() require.NoError(t, err) return id } @@ -234,7 +231,7 @@ func TestAuthToken_DbDelete(t *testing.T) 
{ tt := tt t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) - cnt, err := db.New(conn).Delete(ctx, tt.at) + cnt, err := db.New(conn).Delete(context.Background(), tt.at) assert.Equal(tt.wantCnt, cnt) if tt.wantError { assert.Error(err) diff --git a/internal/authtoken/repository.go b/internal/authtoken/repository.go index a5c531694df..6aeefaf9dc8 100644 --- a/internal/authtoken/repository.go +++ b/internal/authtoken/repository.go @@ -34,15 +34,15 @@ type Repository struct { // NewRepository creates a new Repository. The returned repository is not safe for concurrent go // routines to access it. -func NewRepository(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { +func NewRepository(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { const op = "authtoken.NewRepository" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil db reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil db reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil db writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil db writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil kms") } opts := getOpts(opt...) @@ -74,14 +74,14 @@ func (r *Repository) CreateAuthToken(ctx context.Context, withIamUser *iam.User, if withAuthAccountId == "" { return nil, errors.New(ctx, errors.InvalidParameter, op, "missing auth account id") } - at, err := newAuthToken(ctx) + at, err := newAuthToken() if err != nil { return nil, errors.Wrap(ctx, err, op) } at.AuthAccountId = withAuthAccountId opts := getOpts(opt...) if opts.withPublicId == "" { - id, err := NewAuthTokenId(ctx) + id, err := NewAuthTokenId() if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/authtoken/repository_test.go b/internal/authtoken/repository_test.go index 6c800ebcee7..bf3d1b4f222 100644 --- a/internal/authtoken/repository_test.go +++ b/internal/authtoken/repository_test.go @@ -170,7 +170,7 @@ func TestRepository_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) - got, err := NewRepository(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.args.opts...) + got, err := NewRepository(tt.args.r, tt.args.w, tt.args.kms, tt.args.opts...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) @@ -184,7 +184,6 @@ func TestRepository_New(t *testing.T) { } func TestRepository_CreateAuthToken(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -200,7 +199,7 @@ func TestRepository_CreateAuthToken(t *testing.T) { org2, _ := iam.TestScopes(t, repo) u2 := iam.TestUser(t, repo, org2.GetPublicId()) - testId, err := NewAuthTokenId(ctx) + testId, err := NewAuthTokenId() require.NoError(t, err) tests := []struct { @@ -267,10 +266,10 @@ func TestRepository_CreateAuthToken(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.CreateAuthToken(ctx, tt.iamUser, tt.authAcctId, tt.opt...) 
+ got, err := repo.CreateAuthToken(context.Background(), tt.iamUser, tt.authAcctId, tt.opt...) if tt.wantErr { assert.Error(err) assert.Nil(got) @@ -298,7 +297,6 @@ func TestRepository_CreateAuthToken(t *testing.T) { } func TestRepository_LookupAuthToken(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -310,7 +308,7 @@ func TestRepository_LookupAuthToken(t *testing.T) { at.CtToken = nil at.KeyId = "" - badId, err := NewAuthTokenId(ctx) + badId, err := NewAuthTokenId() require.NoError(t, err) require.NotNil(t, badId) @@ -344,11 +342,11 @@ func TestRepository_LookupAuthToken(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.LookupAuthToken(ctx, tt.id) + got, err := repo.LookupAuthToken(context.Background(), tt.id) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) @@ -377,7 +375,6 @@ func TestRepository_LookupAuthToken(t *testing.T) { } func TestRepository_ValidateToken(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") lastAccessedUpdateDuration = 0 timeSkew = 20 * time.Millisecond @@ -386,7 +383,7 @@ func TestRepository_ValidateToken(t *testing.T) { wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) iamRepo := iam.TestRepo(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(t, err) require.NotNil(t, repo) @@ -401,11 +398,11 @@ func TestRepository_ValidateToken(t *testing.T) { require.NoError(t, err) require.NotNil(t, atTime) - badId, err := NewAuthTokenId(ctx) + badId, err := NewAuthTokenId() require.NoError(t, err) require.NotNil(t, badId) - badToken, err := newAuthToken(ctx) + badToken, err := newAuthToken() require.NoError(t, err) require.NotNil(t, badToken) @@ -449,7 +446,7 @@ func TestRepository_ValidateToken(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := repo.ValidateToken(ctx, tt.id, tt.token) + got, err := repo.ValidateToken(context.Background(), tt.id, tt.token) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) @@ -482,7 +479,7 @@ func TestRepository_ValidateToken(t *testing.T) { // so the next call doesn't cause the last accessed time to be updated. lastAccessedUpdateDuration = 1 * time.Hour - got2, err := repo.ValidateToken(ctx, tt.id, tt.token) + got2, err := repo.ValidateToken(context.Background(), tt.id, tt.token) assert.NoError(err) preTime2, err := ptypes.Timestamp(got2.GetApproximateLastAccessTime().GetTimestamp()) require.NoError(err) @@ -491,7 +488,7 @@ func TestRepository_ValidateToken(t *testing.T) { // We should find no oplog since tokens are not replicated, so they don't need oplog entries. 
assert.Error(db.TestVerifyOplog(t, rw, got.GetPublicId(), db.WithOperation(oplog.OpType_OP_TYPE_UPDATE))) - got3, err := repo.ValidateToken(ctx, tt.id, tt.token) + got3, err := repo.ValidateToken(context.Background(), tt.id, tt.token) require.NoError(err) preTime3, err := ptypes.Timestamp(got3.GetApproximateLastAccessTime().GetTimestamp()) require.NoError(err) @@ -546,16 +543,16 @@ func TestRepository_ValidateToken_expired(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - ctx := context.Background() timeSkew = 20 * time.Millisecond - repo, err := NewRepository(ctx, rw, rw, kms, + repo, err := NewRepository(rw, rw, kms, WithTokenTimeToLiveDuration(tt.expirationDuration), WithTokenTimeToStaleDuration(tt.staleDuration)) require.NoError(err) require.NotNil(repo) + ctx := context.Background() at, err := repo.CreateAuthToken(ctx, iamUser, baseAT.GetAuthAccountId()) require.NoError(err) @@ -574,7 +571,6 @@ func TestRepository_ValidateToken_expired(t *testing.T) { } func TestRepository_DeleteAuthToken(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -582,7 +578,7 @@ func TestRepository_DeleteAuthToken(t *testing.T) { repo := iam.TestRepo(t, conn, wrapper) org, _ := iam.TestScopes(t, repo) at := TestAuthToken(t, conn, kms, org.GetPublicId()) - badId, err := NewAuthTokenId(ctx) + badId, err := NewAuthTokenId() require.NoError(t, err) require.NotNil(t, badId) @@ -616,11 +612,11 @@ func TestRepository_DeleteAuthToken(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.DeleteAuthToken(ctx, tt.id) + got, err := repo.DeleteAuthToken(context.Background(), tt.id) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "Unexpected error %s", err) assert.Equal(tt.wantErrMsg, err.Error()) @@ -637,7 +633,6 @@ func TestRepository_DeleteAuthToken(t *testing.T) { } func TestRepository_ListAuthTokens(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -682,10 +677,10 @@ func TestRepository_ListAuthTokens(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.ListAuthTokens(ctx, []string{tt.orgId}) + got, err := repo.ListAuthTokens(context.Background(), []string{tt.orgId}) assert.NoError(err) sort.Slice(tt.want, func(i, j int) bool { return tt.want[i].PublicId < tt.want[j].PublicId }) sort.Slice(got, func(i, j int) bool { return got[i].PublicId < got[j].PublicId }) @@ -696,12 +691,11 @@ func TestRepository_ListAuthTokens(t *testing.T) { func TestRepository_ListAuthTokens_Multiple_Scopes(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(t, err) iamRepo := iam.TestRepo(t, conn, wrapper) org, proj := iam.TestScopes(t, iamRepo) @@ -715,7 +709,7 @@ func TestRepository_ListAuthTokens_Multiple_Scopes(t *testing.T) { total++ } - got, 
err := repo.ListAuthTokens(ctx, []string{"global", org.GetPublicId(), proj.GetPublicId()}) + got, err := repo.ListAuthTokens(context.Background(), []string{"global", org.GetPublicId(), proj.GetPublicId()}) require.NoError(t, err) assert.Equal(t, total, len(got)) } @@ -727,7 +721,7 @@ func Test_IssuePendingToken(t *testing.T) { rw := db.New(conn) rootWrapper := db.TestWrapper(t) kmsCache := kms.TestKms(t, conn, rootWrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(t, err) org, _ := iam.TestScopes(t, iam.TestRepo(t, conn, rootWrapper)) @@ -746,7 +740,7 @@ func Test_IssuePendingToken(t *testing.T) { { name: "not-found", tokenRequestId: func() string { - tokenPublicId, err := NewAuthTokenId(ctx) + tokenPublicId, err := NewAuthTokenId() require.NoError(t, err) tk := TestAuthToken(t, conn, kmsCache, org.PublicId, WithPublicId(tokenPublicId)) return tk.PublicId @@ -757,7 +751,7 @@ func Test_IssuePendingToken(t *testing.T) { { name: "success", tokenRequestId: func() string { - tokenPublicId, err := NewAuthTokenId(ctx) + tokenPublicId, err := NewAuthTokenId() require.NoError(t, err) tk := TestAuthToken(t, conn, kmsCache, org.PublicId, WithStatus(PendingStatus), WithPublicId(tokenPublicId)) return tk.PublicId @@ -794,7 +788,7 @@ func Test_CloseExpiredPendingTokens(t *testing.T) { rw := db.New(conn) rootWrapper := db.TestWrapper(t) kmsCache := kms.TestKms(t, conn, rootWrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(t, err) org, _ := iam.TestScopes(t, iam.TestRepo(t, conn, rootWrapper)) @@ -808,7 +802,7 @@ func Test_CloseExpiredPendingTokens(t *testing.T) { accts := password.TestMultipleAccounts(t, conn, authMethodId, cnt) for i := 0; i < cnt; i++ { at := allocAuthToken() - id, err := NewAuthTokenId(ctx) + id, err := NewAuthTokenId() require.NoError(t, err) at.PublicId = id exp, err := ptypes.TimestampProto(time.Now().Add(expIn).Truncate(time.Second)) diff --git a/internal/authtoken/testing.go b/internal/authtoken/testing.go index bd0a60962f4..d97962676d9 100644 --- a/internal/authtoken/testing.go +++ b/internal/authtoken/testing.go @@ -34,12 +34,12 @@ func TestAuthToken(t testing.TB, conn *db.DB, kms *kms.Kms, scopeId string, opt ctx := context.Background() rw := db.New(conn) - iamRepo, err := iam.NewRepository(ctx, rw, rw, kms) + iamRepo, err := iam.NewRepository(rw, rw, kms) require.NoError(t, err) u := iam.TestUser(t, iamRepo, scopeId, append(opts.withIamOptions, iam.WithAccountIds(acct.PublicId))...) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(t, err) at, err := repo.CreateAuthToken(ctx, u, acct.GetPublicId(), opt...) diff --git a/internal/bsr/chunk_end.go b/internal/bsr/chunk_end.go index ca744047e78..5dca3e5361b 100644 --- a/internal/bsr/chunk_end.go +++ b/internal/bsr/chunk_end.go @@ -31,7 +31,7 @@ func (c *EndChunk) MarshalData(_ context.Context) ([]byte, error) { // NewEnd creates an EndChunk. 
func NewEnd(ctx context.Context, p Protocol, d Direction, t *Timestamp) (*EndChunk, error) { - const op = "bsr.NewEnd" + const op = "bsr.NewHeader" bc, err := NewBaseChunk(ctx, p, d, t, ChunkEnd) if err != nil { diff --git a/internal/census/census_job.go b/internal/census/census_job.go index da2529ee40b..4ca83b681b0 100644 --- a/internal/census/census_job.go +++ b/internal/census/census_job.go @@ -51,6 +51,7 @@ func (c *censusJob) Status() scheduler.JobStatus { // Run performs the required work depending on the implementation. // The context is used to notify the job that it should exit early. func (c *censusJob) Run(ctx context.Context) error { + const op = "census.(censusJob).Run" err := RunFn(ctx, c) return err } @@ -60,9 +61,10 @@ func runInternal(ctx context.Context, c *censusJob) error { } // NextRunIn returns the duration until the next job run should be scheduled. -// Census will run every hour to ensure any interrupted jobs will be re-attempted +// We report as ready immediately after a successful run. This doesn't mean that +// this job will run immediately, only about as often as the configured scheduler interval. func (c *censusJob) NextRunIn(_ context.Context) (time.Duration, error) { - return time.Hour, nil + return 0, nil } // Name is the unique name of the job. diff --git a/internal/cmd/base/dev.go b/internal/cmd/base/dev.go index 75b5218a300..20b5c6f711a 100644 --- a/internal/cmd/base/dev.go +++ b/internal/cmd/base/dev.go @@ -200,7 +200,7 @@ func (b *Server) CreateDevLdapAuthMethod(ctx context.Context) error { ) if b.DevLdapAuthMethodId == "" { - b.DevLdapAuthMethodId, err = db.NewPublicId(ctx, globals.LdapAuthMethodPrefix) + b.DevLdapAuthMethodId, err = db.NewPublicId(globals.LdapAuthMethodPrefix) if err != nil { return fmt.Errorf("error generating initial ldap auth method id: %w", err) } @@ -330,13 +330,12 @@ func (b *Server) createInitialLdapAuthMethod(ctx context.Context, host string, p ldap.WithDiscoverDn(ctx), ldap.WithUserDn(ctx, testdirectory.DefaultUserDN), ldap.WithGroupDn(ctx, testdirectory.DefaultGroupDN), - ldap.WithOperationalState(ctx, ldap.ActivePublicState), ) if err != nil { return nil, fmt.Errorf("error creating new in memory ldap auth method: %w", err) } if b.DevLdapAuthMethodId == "" { - b.DevLdapAuthMethodId, err = db.NewPublicId(ctx, globals.LdapAuthMethodPrefix) + b.DevLdapAuthMethodId, err = db.NewPublicId(globals.LdapAuthMethodPrefix) if err != nil { return nil, fmt.Errorf("error generating initial ldap auth method id: %w", err) } @@ -366,7 +365,7 @@ func (b *Server) createInitialLdapAuthMethod(ctx context.Context, host string, p } // Link accounts to existing user - iamRepo, err := iam.NewRepository(ctx, rw, rw, kmsCache) + iamRepo, err := iam.NewRepository(rw, rw, kmsCache) if err != nil { return fmt.Errorf("unable to create iam repo: %w", err) } @@ -413,7 +412,7 @@ func (b *Server) CreateDevOidcAuthMethod(ctx context.Context) error { var err error if b.DevOidcAuthMethodId == "" { - b.DevOidcAuthMethodId, err = db.NewPublicId(ctx, globals.OidcAuthMethodPrefix) + b.DevOidcAuthMethodId, err = db.NewPublicId(globals.OidcAuthMethodPrefix) if err != nil { return fmt.Errorf("error generating initial oidc auth method id: %w", err) } @@ -590,7 +589,7 @@ func (b *Server) createInitialOidcAuthMethod(ctx context.Context) (*oidc.AuthMet return nil, fmt.Errorf("error creating new in memory oidc auth method: %w", err) } if b.DevOidcAuthMethodId == "" { - b.DevOidcAuthMethodId, err = db.NewPublicId(ctx, globals.OidcAuthMethodPrefix) + b.DevOidcAuthMethodId, 
err = db.NewPublicId(globals.OidcAuthMethodPrefix) if err != nil { return nil, fmt.Errorf("error generating initial oidc auth method id: %w", err) } @@ -627,7 +626,7 @@ func (b *Server) createInitialOidcAuthMethod(ctx context.Context) (*oidc.AuthMet } // Link accounts to existing user - iamRepo, err := iam.NewRepository(ctx, rw, rw, kmsCache) + iamRepo, err := iam.NewRepository(rw, rw, kmsCache) if err != nil { return fmt.Errorf("unable to create iam repo: %w", err) } diff --git a/internal/cmd/base/initial_resources.go b/internal/cmd/base/initial_resources.go index 9d0528788f4..87291b6e663 100644 --- a/internal/cmd/base/initial_resources.go +++ b/internal/cmd/base/initial_resources.go @@ -38,13 +38,12 @@ func (b *Server) CreateInitialLoginRole(ctx context.Context) (*iam.Role, error) return nil, fmt.Errorf("error adding config keys to kms: %w", err) } - iamRepo, err := iam.NewRepository(ctx, rw, rw, kmsCache, iam.WithRandomReader(b.SecureRandomReader)) + iamRepo, err := iam.NewRepository(rw, rw, kmsCache, iam.WithRandomReader(b.SecureRandomReader)) if err != nil { return nil, fmt.Errorf("unable to create repo for initial login role: %w", err) } - pr, err := iam.NewRole(ctx, - scope.Global.String(), + pr, err := iam.NewRole(scope.Global.String(), iam.WithName("Login and Default Grants"), iam.WithDescription(`Role created for login capability, account self-management, and other default grants for users of the global scope at its creation time`), ) @@ -85,11 +84,11 @@ func (b *Server) CreateInitialPasswordAuthMethod(ctx context.Context) (*password } // Create the dev auth method - pwRepo, err := password.NewRepository(ctx, rw, rw, kmsCache) + pwRepo, err := password.NewRepository(rw, rw, kmsCache) if err != nil { return nil, nil, fmt.Errorf("error creating password repo: %w", err) } - authMethod, err := password.NewAuthMethod(ctx, scope.Global.String(), + authMethod, err := password.NewAuthMethod(scope.Global.String(), password.WithName("Generated global scope initial password auth method"), password.WithDescription("Provides initial administrative and unprivileged authentication into Boundary"), ) @@ -97,7 +96,7 @@ func (b *Server) CreateInitialPasswordAuthMethod(ctx context.Context) (*password return nil, nil, fmt.Errorf("error creating new in memory auth method: %w", err) } if b.DevPasswordAuthMethodId == "" { - b.DevPasswordAuthMethodId, err = db.NewPublicId(ctx, globals.PasswordAuthMethodPrefix) + b.DevPasswordAuthMethodId, err = db.NewPublicId(globals.PasswordAuthMethodPrefix) if err != nil { return nil, nil, fmt.Errorf("error generating initial auth method id: %w", err) } @@ -116,7 +115,7 @@ func (b *Server) CreateInitialPasswordAuthMethod(ctx context.Context) (*password // users on first login. Otherwise, the operator would have to create both // a password account and a user associated with the new account, before // users could successfully login. 
- iamRepo, err := iam.NewRepository(ctx, rw, rw, kmsCache) + iamRepo, err := iam.NewRepository(rw, rw, kmsCache) if err != nil { return nil, nil, fmt.Errorf("unable to create iam repo: %w", err) } @@ -147,7 +146,7 @@ func (b *Server) CreateInitialPasswordAuthMethod(ctx context.Context) (*password b.InfoKeys = append(b.InfoKeys, fmt.Sprintf("generated %s password", typeStr)) b.Info[fmt.Sprintf("generated %s password", typeStr)] = loginPassword - acct, err := password.NewAccount(ctx, am.PublicId, password.WithLoginName(loginName)) + acct, err := password.NewAccount(am.PublicId, password.WithLoginName(loginName)) if err != nil { return nil, fmt.Errorf("error creating new in memory password auth account: %w", err) } @@ -164,7 +163,7 @@ func (b *Server) CreateInitialPasswordAuthMethod(ctx context.Context) (*password b.InfoKeys = append(b.InfoKeys, fmt.Sprintf("generated %s login name", typeStr)) b.Info[fmt.Sprintf("generated %s login name", typeStr)] = acct.GetLoginName() - iamRepo, err := iam.NewRepository(ctx, rw, rw, kmsCache, iam.WithRandomReader(b.SecureRandomReader)) + iamRepo, err := iam.NewRepository(rw, rw, kmsCache, iam.WithRandomReader(b.SecureRandomReader)) if err != nil { return nil, fmt.Errorf("unable to create iam repo: %w", err) } @@ -184,7 +183,7 @@ func (b *Server) CreateInitialPasswordAuthMethod(ctx context.Context) (*password iam.WithDescription("Initial unprivileged user"), ) } - u, err := iam.NewUser(ctx, scope.Global.String(), opts...) + u, err := iam.NewUser(scope.Global.String(), opts...) if err != nil { return nil, fmt.Errorf("error creating in memory user: %w", err) } @@ -198,8 +197,7 @@ func (b *Server) CreateInitialPasswordAuthMethod(ctx context.Context) (*password return u, nil } // Create a role tying them together - pr, err := iam.NewRole(ctx, - scope.Global.String(), + pr, err := iam.NewRole(scope.Global.String(), iam.WithName("Administration"), iam.WithDescription(fmt.Sprintf(`Provides admin grants within the "%s" scope to the initial user`, scope.Global.String())), ) @@ -243,7 +241,7 @@ func (b *Server) CreateInitialPasswordAuthMethod(ctx context.Context) (*password } } if b.DevUserId == "" { - b.DevUserId, err = db.NewPublicId(ctx, globals.UserPrefix) + b.DevUserId, err = db.NewPublicId(globals.UserPrefix) if err != nil { return nil, nil, fmt.Errorf("error generating initial user id: %w", err) } @@ -270,14 +268,14 @@ func (b *Server) CreateInitialScopes(ctx context.Context) (*iam.Scope, *iam.Scop return nil, nil, fmt.Errorf("error adding config keys to kms: %w", err) } - iamRepo, err := iam.NewRepository(ctx, rw, rw, kmsCache) + iamRepo, err := iam.NewRepository(rw, rw, kmsCache) if err != nil { return nil, nil, fmt.Errorf("error creating scopes repository: %w", err) } // Create the scopes if b.DevOrgId == "" { - b.DevOrgId, err = db.NewPublicId(ctx, scope.Org.Prefix()) + b.DevOrgId, err = db.NewPublicId(scope.Org.Prefix()) if err != nil { return nil, nil, fmt.Errorf("error generating initial org id: %w", err) } @@ -288,7 +286,7 @@ func (b *Server) CreateInitialScopes(ctx context.Context) (*iam.Scope, *iam.Scop iam.WithRandomReader(b.SecureRandomReader), iam.WithPublicId(b.DevOrgId), } - orgScope, err := iam.NewOrg(ctx, opts...) + orgScope, err := iam.NewOrg(opts...) 
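For the scope-creation hunks above, this is a minimal sketch (not part of the patch) of generating an org id and building the in-memory org after the change. The name and description literals are placeholders; persisting the scope through the iam repository still takes a context.

package base

import (
	"io"

	"github.com/hashicorp/boundary/internal/db"
	"github.com/hashicorp/boundary/internal/iam"
	"github.com/hashicorp/boundary/internal/types/scope"
)

// newOrgSketch is illustrative only.
func newOrgSketch(randomReader io.Reader) (*iam.Scope, error) {
	// public id generation no longer takes a ctx after this change
	orgId, err := db.NewPublicId(scope.Org.Prefix())
	if err != nil {
		return nil, err
	}
	// nor does the in-memory constructor
	return iam.NewOrg(
		iam.WithName("sketch org"),
		iam.WithDescription("illustrative only"),
		iam.WithRandomReader(randomReader),
		iam.WithPublicId(orgId),
	)
}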
if err != nil { return nil, nil, fmt.Errorf("error creating new in memory org scope: %w", err) } @@ -300,7 +298,7 @@ func (b *Server) CreateInitialScopes(ctx context.Context) (*iam.Scope, *iam.Scop b.Info["generated org scope id"] = b.DevOrgId if b.DevProjectId == "" { - b.DevProjectId, err = db.NewPublicId(ctx, scope.Project.Prefix()) + b.DevProjectId, err = db.NewPublicId(scope.Project.Prefix()) if err != nil { return nil, nil, fmt.Errorf("error generating initial project id: %w", err) } @@ -311,7 +309,7 @@ func (b *Server) CreateInitialScopes(ctx context.Context) (*iam.Scope, *iam.Scop iam.WithRandomReader(b.SecureRandomReader), iam.WithPublicId(b.DevProjectId), } - projScope, err := iam.NewProject(ctx, b.DevOrgId, opts...) + projScope, err := iam.NewProject(b.DevOrgId, opts...) if err != nil { return nil, nil, fmt.Errorf("error creating new in memory project scope: %w", err) } @@ -339,14 +337,14 @@ func (b *Server) CreateInitialHostResources(ctx context.Context) (*static.HostCa return nil, nil, nil, fmt.Errorf("error adding config keys to kms: %w", err) } - staticRepo, err := static.NewRepository(ctx, rw, rw, kmsCache) + staticRepo, err := static.NewRepository(rw, rw, kmsCache) if err != nil { return nil, nil, nil, fmt.Errorf("error creating static repository: %w", err) } // Host Catalog if b.DevHostCatalogId == "" { - b.DevHostCatalogId, err = db.NewPublicId(ctx, globals.StaticHostCatalogPrefix) + b.DevHostCatalogId, err = db.NewPublicId(globals.StaticHostCatalogPrefix) if err != nil { return nil, nil, nil, fmt.Errorf("error generating initial host catalog id: %w", err) } @@ -356,7 +354,7 @@ func (b *Server) CreateInitialHostResources(ctx context.Context) (*static.HostCa static.WithDescription("Provides an initial host catalog in Boundary"), static.WithPublicId(b.DevHostCatalogId), } - hc, err := static.NewHostCatalog(ctx, b.DevProjectId, opts...) + hc, err := static.NewHostCatalog(b.DevProjectId, opts...) if err != nil { return nil, nil, nil, fmt.Errorf("error creating in memory host catalog: %w", err) } @@ -368,7 +366,7 @@ func (b *Server) CreateInitialHostResources(ctx context.Context) (*static.HostCa // Host if b.DevHostId == "" { - b.DevHostId, err = db.NewPublicId(ctx, globals.StaticHostPrefix) + b.DevHostId, err = db.NewPublicId(globals.StaticHostPrefix) if err != nil { return nil, nil, nil, fmt.Errorf("error generating initial host id: %w", err) } @@ -382,7 +380,7 @@ func (b *Server) CreateInitialHostResources(ctx context.Context) (*static.HostCa static.WithAddress(b.DevHostAddress), static.WithPublicId(b.DevHostId), } - h, err := static.NewHost(ctx, hc.PublicId, opts...) + h, err := static.NewHost(hc.PublicId, opts...) if err != nil { return nil, nil, nil, fmt.Errorf("error creating in memory host: %w", err) } @@ -394,7 +392,7 @@ func (b *Server) CreateInitialHostResources(ctx context.Context) (*static.HostCa // Host Set if b.DevHostSetId == "" { - b.DevHostSetId, err = db.NewPublicId(ctx, globals.StaticHostSetPrefix) + b.DevHostSetId, err = db.NewPublicId(globals.StaticHostSetPrefix) if err != nil { return nil, nil, nil, fmt.Errorf("error generating initial host set id: %w", err) } @@ -404,7 +402,7 @@ func (b *Server) CreateInitialHostResources(ctx context.Context) (*static.HostCa static.WithDescription("Provides an initial host set in Boundary"), static.WithPublicId(b.DevHostSetId), } - hs, err := static.NewHostSet(ctx, hc.PublicId, opts...) + hs, err := static.NewHostSet(hc.PublicId, opts...) 
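Similarly, for the host-resource hunks above, a sketch (not part of the patch) of chaining catalog, host, and host set once the context parameters are dropped; the address value is a placeholder and only the catalog gets an explicit public id here, mirroring the dev setup code.

package base

import (
	"github.com/hashicorp/boundary/internal/db"
	"github.com/hashicorp/boundary/internal/globals"
	"github.com/hashicorp/boundary/internal/host/static"
)

// newHostResourcesSketch is illustrative only.
func newHostResourcesSketch(projectId string) (*static.HostCatalog, *static.Host, *static.HostSet, error) {
	catalogId, err := db.NewPublicId(globals.StaticHostCatalogPrefix)
	if err != nil {
		return nil, nil, nil, err
	}
	hc, err := static.NewHostCatalog(projectId, static.WithPublicId(catalogId))
	if err != nil {
		return nil, nil, nil, err
	}
	h, err := static.NewHost(hc.PublicId, static.WithAddress("127.0.0.1"))
	if err != nil {
		return nil, nil, nil, err
	}
	hs, err := static.NewHostSet(hc.PublicId)
	if err != nil {
		return nil, nil, nil, err
	}
	return hc, h, hs, nil
}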
if err != nil { return nil, nil, nil, fmt.Errorf("error creating in memory host set: %w", err) } @@ -441,7 +439,7 @@ func (b *Server) CreateInitialTargetWithAddress(ctx context.Context) (target.Tar // When this function is not called as part of boundary dev (eg: as part of // boundary database init or tests), generate random target ids. if len(b.DevTargetId) == 0 { - b.DevTargetId, err = db.NewPublicId(ctx, globals.TcpTargetPrefix) + b.DevTargetId, err = db.NewPublicId(globals.TcpTargetPrefix) if err != nil { return nil, fmt.Errorf("failed to generate initial target id: %w", err) } @@ -473,7 +471,7 @@ func (b *Server) CreateInitialTargetWithAddress(ctx context.Context) (target.Tar b.Info["generated target with address id"] = b.DevTargetId if b.DevUnprivilegedUserId != "" { - iamRepo, err := iam.NewRepository(ctx, rw, rw, kmsCache, iam.WithRandomReader(b.SecureRandomReader)) + iamRepo, err := iam.NewRepository(rw, rw, kmsCache, iam.WithRandomReader(b.SecureRandomReader)) if err != nil { return nil, fmt.Errorf("failed to create iam repository: %w", err) } @@ -508,7 +506,7 @@ func (b *Server) CreateInitialTargetWithHostSources(ctx context.Context) (target // When this function is not called as part of boundary dev (eg: as part of // boundary database init or tests), generate random target ids. if len(b.DevSecondaryTargetId) == 0 { - b.DevSecondaryTargetId, err = db.NewPublicId(ctx, globals.TcpTargetPrefix) + b.DevSecondaryTargetId, err = db.NewPublicId(globals.TcpTargetPrefix) if err != nil { return nil, fmt.Errorf("failed to generate initial secondary target id: %w", err) } @@ -541,7 +539,7 @@ func (b *Server) CreateInitialTargetWithHostSources(ctx context.Context) (target b.Info["generated target with host source id"] = b.DevSecondaryTargetId if b.DevUnprivilegedUserId != "" { - iamRepo, err := iam.NewRepository(ctx, rw, rw, kmsCache, iam.WithRandomReader(b.SecureRandomReader)) + iamRepo, err := iam.NewRepository(rw, rw, kmsCache, iam.WithRandomReader(b.SecureRandomReader)) if err != nil { return nil, fmt.Errorf("failed to create iam repository: %w", err) } @@ -678,8 +676,7 @@ func unprivilegedDevUserRoleSetup(ctx context.Context, repo *iam.Repository, use // Create a new role for the "authorize-session" grant and add the // unprivileged user as a principal. 
- asRole, err := iam.NewRole(ctx, - projectId, + asRole, err := iam.NewRole(projectId, iam.WithName(fmt.Sprintf("Session authorization for %s", targetId)), iam.WithDescription(fmt.Sprintf("Provides grants within the dev project scope to allow the initial unprivileged user to authorize sessions against %s", targetId)), ) diff --git a/internal/cmd/base/server_test.go b/internal/cmd/base/server_test.go index cbe9854f697..b0921b9a114 100644 --- a/internal/cmd/base/server_test.go +++ b/internal/cmd/base/server_test.go @@ -122,7 +122,7 @@ func TestServer_SetupKMSes_Purposes(t *testing.T) { }, } s := NewServer(&Command{Context: context.Background()}) - require.NoError(s.SetupEventing(s.Context, logger, serLock, "setup-kms-testing")) + require.NoError(s.SetupEventing(logger, serLock, "setup-kms-testing")) err := s.SetupKMSes(s.Context, cli.NewMockUi(), &config.Config{SharedConfig: conf}) if tt.wantErrContains != "" { @@ -180,7 +180,7 @@ func TestServer_SetupKMSes_RootMigration(t *testing.T) { }, } s := NewServer(&Command{Context: context.Background()}) - require.NoError(s.SetupEventing(s.Context, logger, serLock, "setup-kms-testing")) + require.NoError(s.SetupEventing(logger, serLock, "setup-kms-testing")) err := s.SetupKMSes(s.Context, cli.NewMockUi(), &config.Config{SharedConfig: conf}) require.NoError(err) require.NotNil(s.RootKms) @@ -210,7 +210,7 @@ func TestServer_SetupKMSes_RootMigration(t *testing.T) { }, } s := NewServer(&Command{Context: context.Background()}) - require.NoError(s.SetupEventing(s.Context, logger, serLock, "setup-kms-testing")) + require.NoError(s.SetupEventing(logger, serLock, "setup-kms-testing")) err := s.SetupKMSes(s.Context, cli.NewMockUi(), &config.Config{SharedConfig: conf}) require.Error(err) }) @@ -242,7 +242,7 @@ func TestServer_SetupKMSes_RootMigration(t *testing.T) { }, } s := NewServer(&Command{Context: context.Background()}) - require.NoError(s.SetupEventing(s.Context, logger, serLock, "setup-kms-testing")) + require.NoError(s.SetupEventing(logger, serLock, "setup-kms-testing")) err := s.SetupKMSes(s.Context, cli.NewMockUi(), &config.Config{SharedConfig: conf}) require.Error(err) }) @@ -361,7 +361,7 @@ func TestServer_SetupEventing(t *testing.T) { assert, require := assert.New(t), require.New(t) event.TestResetSystEventer(t) - err := tt.s.SetupEventing(context.Background(), tt.logger, tt.lock, tt.name, tt.opt...) + err := tt.s.SetupEventing(tt.logger, tt.lock, tt.name, tt.opt...) 
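The surrounding hunks change SetupEventing to drop its leading context parameter. A sketch (not part of the patch) of a caller after the change; the package and function names are hypothetical, the server name string is a placeholder, and the eventer config is passed as an option exactly as the command code above does.

package server

import (
	"sync"

	"github.com/hashicorp/boundary/internal/cmd/base"
	"github.com/hashicorp/boundary/internal/cmd/config"
	"github.com/hashicorp/go-hclog"
)

// setupEventingSketch is illustrative only.
func setupEventingSketch(s *base.Server, logger hclog.Logger, lock *sync.Mutex, cfg *config.Config) error {
	// post-change signature: logger, serialization lock, server name, options
	return s.SetupEventing(logger, lock, "sketch-server", base.WithEventerConfig(cfg.Eventing))
}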
if tt.wantErrMatch != nil || tt.wantErrIs != nil { require.Error(err) assert.Nil(tt.s.Eventer) diff --git a/internal/cmd/base/servers.go b/internal/cmd/base/servers.go index e5c6bc53c79..70e4559126c 100644 --- a/internal/cmd/base/servers.go +++ b/internal/cmd/base/servers.go @@ -161,22 +161,22 @@ func NewServer(cmd *Command) *Server { // SetupEventing will setup the server's eventer and initialize the "system // wide" eventer with a pointer to the same eventer -func (b *Server) SetupEventing(ctx context.Context, logger hclog.Logger, serializationLock *sync.Mutex, serverName string, opt ...Option) error { +func (b *Server) SetupEventing(logger hclog.Logger, serializationLock *sync.Mutex, serverName string, opt ...Option) error { const op = "base.(Server).SetupEventing" if logger == nil { - return berrors.New(ctx, berrors.InvalidParameter, op, "missing logger") + return berrors.NewDeprecated(berrors.InvalidParameter, op, "missing logger") } if serializationLock == nil { - return berrors.New(ctx, berrors.InvalidParameter, op, "missing serialization lock") + return berrors.NewDeprecated(berrors.InvalidParameter, op, "missing serialization lock") } if serverName == "" { - return berrors.New(ctx, berrors.InvalidParameter, op, "missing server name") + return berrors.NewDeprecated(berrors.InvalidParameter, op, "missing server name") } opts := getOpts(opt...) if opts.withEventerConfig != nil { if err := opts.withEventerConfig.Validate(); err != nil { - return berrors.Wrap(ctx, err, op, berrors.WithMsg("invalid eventer config")) + return berrors.WrapDeprecated(err, op, berrors.WithMsg("invalid eventer config")) } } if opts.withEventerConfig == nil { @@ -185,7 +185,7 @@ func (b *Server) SetupEventing(ctx context.Context, logger hclog.Logger, seriali if opts.withEventFlags != nil { if err := opts.withEventFlags.Validate(); err != nil { - return berrors.Wrap(ctx, err, op, berrors.WithMsg("invalid event flags")) + return berrors.WrapDeprecated(err, op, berrors.WithMsg("invalid event flags")) } if opts.withEventFlags.Format != "" { for i := 0; i < len(opts.withEventerConfig.Sinks); i++ { @@ -224,12 +224,12 @@ func (b *Server) SetupEventing(ctx context.Context, logger hclog.Logger, seriali event.WithAuditWrapper(opts.withEventWrapper), event.WithGating(opts.withEventGating)) if err != nil { - return berrors.Wrap(ctx, err, op, berrors.WithMsg("unable to create eventer")) + return berrors.WrapDeprecated(err, op, berrors.WithMsg("unable to create eventer")) } b.Eventer = e if err := event.InitSysEventer(logger, serializationLock, serverName, event.WithEventer(e)); err != nil { - return berrors.Wrap(ctx, err, op, berrors.WithMsg("unable to initialize system eventer")) + return berrors.WrapDeprecated(err, op, berrors.WithMsg("unable to initialize system eventer")) } return nil @@ -239,11 +239,11 @@ func (b *Server) SetupEventing(ctx context.Context, logger hclog.Logger, seriali func (b *Server) AddEventerToContext(ctx context.Context) (context.Context, error) { const op = "base.(Server).AddEventerToContext" if b.Eventer == nil { - return nil, berrors.New(ctx, berrors.InvalidParameter, op, "missing server eventer") + return nil, berrors.NewDeprecated(berrors.InvalidParameter, op, "missing server eventer") } e, err := event.NewEventerContext(ctx, b.Eventer) if err != nil { - return nil, berrors.Wrap(ctx, err, op, berrors.WithMsg("unable to add eventer to context")) + return nil, berrors.WrapDeprecated(err, op, berrors.WithMsg("unable to add eventer to context")) } return e, nil } diff --git 
a/internal/cmd/commands/authtokenscmd/authtokens.gen.go b/internal/cmd/commands/authtokenscmd/authtokens.gen.go index 71ee94dd4af..e3fee451439 100644 --- a/internal/cmd/commands/authtokenscmd/authtokens.gen.go +++ b/internal/cmd/commands/authtokenscmd/authtokens.gen.go @@ -79,7 +79,7 @@ func (c *Command) Help() string { default: - helpStr = c.extraHelpFunc(helpMap) + helpStr = helpMap["base"]() } diff --git a/internal/cmd/commands/authtokenscmd/authtokens.go b/internal/cmd/commands/authtokenscmd/authtokens.go deleted file mode 100644 index 17da532a80c..00000000000 --- a/internal/cmd/commands/authtokenscmd/authtokens.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package authtokenscmd - -import "github.com/hashicorp/boundary/internal/cmd/base" - -func (c *Command) extraHelpFunc(helpMap map[string]func() string) string { - var helpStr string - switch c.Func { - case "": - return base.WrapForHelpText([]string{ - "Usage: boundary auth-tokens [sub command] [options] [args]", - "", - " This command allows operations on Boundary auth token resources. Example:", - "", - " List all auth tokens:", - "", - ` $ boundary auth-tokens list -recursive `, - "", - " Please see the auth-tokens subcommand help for detailed usage information.", - " Note: To create an auth token, see the authenticate subcommand.", - }) - - default: - helpStr = helpMap["base"]() - } - return helpStr -} diff --git a/internal/cmd/commands/connect/connect.go b/internal/cmd/commands/connect/connect.go index 741aa746191..bb38f206488 100644 --- a/internal/cmd/commands/connect/connect.go +++ b/internal/cmd/commands/connect/connect.go @@ -621,10 +621,7 @@ func (c *Command) Run(args []string) (retCode int) { } } - // Only send it if we should, and also if we're not after expiration, with a - // bit of buffer in case clocks are not quite the same between worker and - // this machine. 
- if sendSessionCancel && time.Now().Before(c.expiration.Add(-5*time.Minute)) { + if sendSessionCancel { ctx, cancel := context.WithTimeout(context.Background(), sessionCancelTimeout) wsConn, err := c.getWsConn(ctx, workerAddr, transport) if err != nil { diff --git a/internal/cmd/commands/database/init.go b/internal/cmd/commands/database/init.go index 72ef658d903..92bf710011b 100644 --- a/internal/cmd/commands/database/init.go +++ b/internal/cmd/commands/database/init.go @@ -200,7 +200,7 @@ func (c *InitCommand) Run(args []string) (retCode int) { return base.CommandCliError } serverName = fmt.Sprintf("%s/boundary-database-init", serverName) - if err := c.SetupEventing(c.Context, c.Logger, c.StderrLock, serverName, base.WithEventerConfig(c.Config.Eventing)); err != nil { + if err := c.SetupEventing(c.Logger, c.StderrLock, serverName, base.WithEventerConfig(c.Config.Eventing)); err != nil { c.UI.Error(err.Error()) return base.CommandCliError } @@ -500,7 +500,7 @@ func (c *InitCommand) verifyOplogIsEmpty(ctx context.Context) error { const op = "database.(InitCommand).verifyOplogIsEmpty" underlyingDB, err := c.Database.SqlDB(ctx) if err != nil { - return errors.New(ctx, errors.Internal, op, "unable to retreive db", errors.WithWrap(err)) + return errors.NewDeprecated(errors.Internal, op, "unable to retreive db", errors.WithWrap(err)) } r := underlyingDB.QueryRowContext(c.Context, "select not exists(select 1 from oplog_entry limit 1)") if r.Err() != nil { @@ -511,7 +511,7 @@ func (c *InitCommand) verifyOplogIsEmpty(ctx context.Context) error { return errors.Wrap(ctx, err, op) } if !empty { - return errors.New(ctx, errors.MigrationIntegrity, op, "oplog_entry is not empty") + return errors.NewDeprecated(errors.MigrationIntegrity, op, "oplog_entry is not empty") } return nil } diff --git a/internal/cmd/commands/database/migrate.go b/internal/cmd/commands/database/migrate.go index 61d2eab8beb..a23b73d571f 100644 --- a/internal/cmd/commands/database/migrate.go +++ b/internal/cmd/commands/database/migrate.go @@ -165,7 +165,6 @@ func (c *MigrateCommand) Run(args []string) (retCode int) { } serverName = fmt.Sprintf("%s/boundary-database-migrate", serverName) if err := c.srv.SetupEventing( - c.Context, c.srv.Logger, c.srv.StderrLock, serverName, diff --git a/internal/cmd/commands/dev/dev.go b/internal/cmd/commands/dev/dev.go index c1a45b6d1dc..6cb5eb8d49c 100644 --- a/internal/cmd/commands/dev/dev.go +++ b/internal/cmd/commands/dev/dev.go @@ -658,7 +658,6 @@ func (c *Command) Run(args []string) int { base.StartMemProfiler(c.Context) if err := c.SetupEventing( - c.Context, c.Logger, c.StderrLock, serverName, @@ -925,7 +924,7 @@ func (c *Command) Run(args []string) int { return base.CommandCliError } - opsServer, err := ops.NewServer(c.Context, c.Logger, c.controller, c.worker, c.Listeners...) + opsServer, err := ops.NewServer(c.Logger, c.controller, c.worker, c.Listeners...) 
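The dev and server command hunks here drop the context argument from ops.NewServer. Below is a sketch (not part of the patch) of the call after the change; note that the parameter types are assumptions inferred from the call sites in these hunks, not taken from the ops package itself.

package server

import (
	"github.com/hashicorp/boundary/internal/cmd/base"
	"github.com/hashicorp/boundary/internal/daemon/controller"
	"github.com/hashicorp/boundary/internal/daemon/ops"
	"github.com/hashicorp/boundary/internal/daemon/worker"
	"github.com/hashicorp/go-hclog"
)

// newOpsServerSketch is illustrative only; the parameter types are assumed.
func newOpsServerSketch(logger hclog.Logger, c *controller.Controller, w *worker.Worker, listeners ...*base.ServerListener) (*ops.Server, error) {
	// post-change: no leading context argument
	return ops.NewServer(logger, c, w, listeners...)
}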
if err != nil { c.UI.Error(fmt.Errorf("Failed to start ops listeners: %w", err).Error()) return base.CommandCliError diff --git a/internal/cmd/commands/rolescmd/funcs.go b/internal/cmd/commands/rolescmd/funcs.go index 8cbfaf801d5..23b1f4c2003 100644 --- a/internal/cmd/commands/rolescmd/funcs.go +++ b/internal/cmd/commands/rolescmd/funcs.go @@ -13,7 +13,6 @@ import ( "github.com/hashicorp/boundary/internal/cmd/base" "github.com/hashicorp/boundary/internal/perms" "github.com/hashicorp/boundary/internal/types/scope" - "github.com/hashicorp/boundary/version" "github.com/mitchellh/go-wordwrap" ) @@ -219,20 +218,11 @@ func extraFlagsHandlingFuncImpl(c *Command, _ *base.FlagSets, opts *[]roles.Opti if len(c.flagGrants) > 0 { for _, grant := range c.flagGrants { - parsed, err := perms.Parse(c.Context, scope.Global.String(), grant) + _, err := perms.Parse(scope.Global.String(), grant) if err != nil { c.UI.Error(fmt.Errorf("Grant %q could not be parsed successfully: %w", grant, err).Error()) return false } - switch { - case parsed.Id() == "": - // Nothing - case version.SupportsFeature(version.Binary, version.SupportIdInGrants): - c.UI.Warn(fmt.Sprintf("Grant %q uses the %q field, which is deprecated and will not be allowed in version 0.15.0+. Please use %q instead.", grant, "id", "ids")) - default: - c.UI.Error(fmt.Sprintf("Grant %q uses the %q field which is no longer supported. Please use %q instead.", grant, "id", "ids")) - return false - } } } diff --git a/internal/cmd/commands/server/server.go b/internal/cmd/commands/server/server.go index b2edce976fd..0774b53eae8 100644 --- a/internal/cmd/commands/server/server.go +++ b/internal/cmd/commands/server/server.go @@ -182,8 +182,7 @@ func (c *Command) Run(args []string) int { } serverName = fmt.Sprintf("%s/%s", serverName, strings.Join(serverTypes, "+")) - if err := c.SetupEventing(c.Context, - c.Logger, + if err := c.SetupEventing(c.Logger, c.StderrLock, serverName, base.WithEventerConfig(c.Config.Eventing), @@ -522,7 +521,7 @@ func (c *Command) Run(args []string) int { return base.CommandCliError } - opsServer, err := ops.NewServer(c.Context, c.Logger, c.controller, c.worker, c.Listeners...) + opsServer, err := ops.NewServer(c.Logger, c.controller, c.worker, c.Listeners...) 
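The rolescmd hunk above reduces grant handling to a parse-and-validate step. A sketch (not part of the patch) of that call after the change; the helper name is hypothetical.

package rolescmd

import (
	"github.com/hashicorp/boundary/internal/perms"
	"github.com/hashicorp/boundary/internal/types/scope"
)

// validateGrantSketch mirrors the loop body above, which now only checks
// that the grant string parses.
func validateGrantSketch(grant string) error {
	_, err := perms.Parse(scope.Global.String(), grant)
	return err
}

A caller would pass each -grant flag value through it, for example a grant string such as "id=*;type=*;actions=read".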
if err != nil { c.UI.Error(err.Error()) return base.CommandCliError diff --git a/internal/cmd/commands/server/server_test.go b/internal/cmd/commands/server/server_test.go index b2d7ed85c3d..466b7d8484e 100644 --- a/internal/cmd/commands/server/server_test.go +++ b/internal/cmd/commands/server/server_test.go @@ -59,7 +59,7 @@ func testServerCommand(t *testing.T, opts testServerCommandOpts) *Command { } require.NoError(cmd.SetupLogging("trace", "", "", "")) - require.NoError(cmd.SetupEventing(cmd.Context, cmd.Logger, cmd.StderrLock, "test-server-command")) + require.NoError(cmd.SetupEventing(cmd.Logger, cmd.StderrLock, "test-server-command")) if !opts.EnableMetrics { cmd.PrometheusRegisterer = nil diff --git a/internal/cmd/commands/sessionrecordingscmd/download.go b/internal/cmd/commands/sessionrecordingscmd/download.go index d301c50373e..bca6f8ca0bc 100644 --- a/internal/cmd/commands/sessionrecordingscmd/download.go +++ b/internal/cmd/commands/sessionrecordingscmd/download.go @@ -54,18 +54,18 @@ func (c *DownloadCommand) Flags() *base.FlagSets { f.StringVar(&base.StringVar{ Name: "id", Target: &c.FlagId, - Usage: "The id of the session recording resource to download.", + Usage: "The id of the session recording resource to download", }) f.StringVar(&base.StringVar{ Name: "output", Target: &c.FlagOutputFile, - Usage: "An optional output file for the download. If not provided the recording id will be used with a \".cast\" extension. Use \"-\" for stdout.", + Usage: "An optional output file for the download. If not provided the recording id will be used with a \".cast\" extension. Use \"-\" for stdout", Aliases: []string{"o"}, }) f.BoolVar(&base.BoolVar{ Name: "no-clobber", Target: &c.FlagNoClobber, - Usage: "An option to stop downloads that would overwrite existing files.", + Usage: "An option to stop downloads that would overwrite existing files", Aliases: []string{"nc"}, }) return set diff --git a/internal/cmd/commands/sessionrecordingscmd/funcs.go b/internal/cmd/commands/sessionrecordingscmd/funcs.go index 6c26a2d904f..b9303346392 100644 --- a/internal/cmd/commands/sessionrecordingscmd/funcs.go +++ b/internal/cmd/commands/sessionrecordingscmd/funcs.go @@ -312,119 +312,6 @@ func printItemTable(item *sessionrecordings.SessionRecording, resp *api.Response ) } } - if len(item.CreateTimeValues.CredentialLibraries) > 0 { - ret = append(ret, - "", - " Credential Libraries:") - for _, cl := range item.CreateTimeValues.CredentialLibraries { - cm := map[string]any{ - "ID": cl.Id, - } - if cl.Name != "" { - cm["Name"] = cl.Name - } - if cl.Description != "" { - cm["Description"] = cl.Description - } - if cl.Type != "" { - cm["Type"] = cl.Type - } - if len(cl.Purposes) > 0 { - cm["Purpose"] = strings.Join(cl.Purposes, ", ") - } - if attrs, _ := cl.GetVaultSSHCertificateCredentialLibraryAttributes(); attrs != nil { - if attrs.Path != "" { - cm["Vault Path"] = attrs.Path - } - if attrs.Username != "" { - cm["Username"] = attrs.Username - } - if attrs.KeyType != "" { - cm["Key Type"] = attrs.KeyType - } - if attrs.Ttl != "" { - cm["Ttl"] = attrs.Ttl - } - } - if attrs, _ := cl.GetVaultCredentialLibraryAttributes(); attrs != nil { - if attrs.Path != "" { - cm["Vault Path"] = attrs.Path - } - if attrs.HttpMethod != "" { - cm["Http Method"] = attrs.HttpMethod - } - if attrs.HttpRequestBody != "" { - cm["Http Request Body"] = attrs.HttpRequestBody - } - } - maxLibLength := base.MaxAttributesLength(cm, nil, nil) - ret = append(ret, - base.WrapMap(4, maxLibLength, cm), - "", - ) - if cs := cl.CredentialStore; 
cs != nil { - csm := credStoreMap(cs) - maxStoreLength := base.MaxAttributesLength(csm, nil, nil) - ret = append(ret, - " Credential Store:", - base.WrapMap(6, maxStoreLength, csm), - "", - ) - } - } - } - - if len(item.CreateTimeValues.Credentials) > 0 { - ret = append(ret, - "", - " Credentials:") - for _, c := range item.CreateTimeValues.Credentials { - cm := map[string]any{ - "ID": c.Id, - } - if c.Name != "" { - cm["Name"] = c.Name - } - if c.Description != "" { - cm["Description"] = c.Description - } - if c.Type != "" { - cm["Type"] = c.Type - } - if len(c.Purposes) > 0 { - cm["Purpose"] = strings.Join(c.Purposes, ", ") - } - if attrs, _ := c.GetJsonCredentialAttributes(); attrs != nil { - if attrs.ObjectHmac != "" { - cm["Object HMAC"] = attrs.ObjectHmac - } - } - if attrs, _ := c.GetUsernamePasswordCredentialAttributes(); attrs != nil { - if attrs.Username != "" { - cm["Username"] = attrs.Username - } - } - if attrs, _ := c.GetSshPrivateKeyCredentialAttributes(); attrs != nil { - if attrs.Username != "" { - cm["Username"] = attrs.Username - } - } - maxLibLength := base.MaxAttributesLength(cm, nil, nil) - ret = append(ret, - base.WrapMap(4, maxLibLength, cm), - "", - ) - if cs := c.CredentialStore; cs != nil { - csm := credStoreMap(cs) - maxStoreLength := base.MaxAttributesLength(csm, nil, nil) - ret = append(ret, - " Credential Store:", - base.WrapMap(6, maxStoreLength, csm), - "", - ) - } - } - } } if len(item.ConnectionRecordings) > 0 { @@ -514,36 +401,6 @@ func printItemTable(item *sessionrecordings.SessionRecording, resp *api.Response return base.WrapForHelpText(ret) } -func credStoreMap(cs *sessionrecordings.CredentialStore) map[string]any { - csm := map[string]any{ - "ID": cs.Id, - } - if cs.Name != "" { - csm["Name"] = cs.Name - } - if cs.Description != "" { - csm["Description"] = cs.Description - } - if cs.ScopeId != "" { - csm["Scope ID"] = cs.ScopeId - } - if cs.Type != "" { - csm["Type"] = cs.Type - } - if attrs, _ := cs.GetVaultCredentialStoreAttributes(); attrs != nil { - if attrs.Address != "" { - csm["Vault Address"] = attrs.Address - } - if attrs.Namespace != "" { - csm["Namespace"] = attrs.Namespace - } - if attrs.WorkerFilter != "" { - csm["Worker Filter"] = attrs.WorkerFilter - } - } - return csm -} - func customScopeInfoForOutput(scp *scopes.ScopeInfo, maxLength int, prefixSpaces int) string { if scp == nil { return " " diff --git a/internal/cmd/config/config.go b/internal/cmd/config/config.go index a56e6a786fc..6d099dbf464 100644 --- a/internal/cmd/config/config.go +++ b/internal/cmd/config/config.go @@ -218,7 +218,7 @@ type Controller struct { License string `hcl:"license"` } -func (c *Controller) InitNameIfEmpty(ctx context.Context) error { +func (c *Controller) InitNameIfEmpty() error { if c == nil { return fmt.Errorf("controller config is empty") } @@ -227,7 +227,7 @@ func (c *Controller) InitNameIfEmpty(ctx context.Context) error { } var err error - c.Name, err = db.NewPublicId(ctx, "c") + c.Name, err = db.NewPublicId("c") if err != nil { return fmt.Errorf("error auto-generating controller name: %w", err) } @@ -346,10 +346,7 @@ func DevWorker(opt ...Option) (*Config, error) { if err != nil { return nil, fmt.Errorf("error parsing dev config: %w", err) } - opts, err := getOpts(opt...) - if err != nil { - return nil, fmt.Errorf("error parsing options: %w", err) - } + opts := getOpts(opt...) 
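The DevWorker hunk above (and the matching DevController one below) reflects that config option parsing can no longer fail: getOpts returns only the options struct. A sketch (not part of the patch) of an external caller after that change; the package name is hypothetical and the enabled flags are arbitrary example values.

package config_test

import (
	"fmt"

	"github.com/hashicorp/boundary/internal/cmd/config"
)

// devControllerConfigSketch is illustrative only.
func devControllerConfigSketch() (*config.Config, error) {
	// options are plain setters after this change, so only the config
	// generation itself can return an error
	cfg, err := config.DevController(
		config.WithAuditEventsEnabled(true),
		config.WithSysEventsEnabled(false),
	)
	if err != nil {
		return nil, fmt.Errorf("error building dev controller config: %w", err)
	}
	return cfg, nil
}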
parsed.Eventing.AuditEnabled = opts.withAuditEventsEnabled parsed.Eventing.ObservationsEnabled = opts.withObservationsEnabled parsed.Eventing.SysEventsEnabled = opts.withSysEventsEnabled @@ -392,10 +389,7 @@ func DevController(opt ...Option) (*Config, error) { parsed.DevWorkerAuthKey = workerAuthKey parsed.DevBsrKey = bsrKey parsed.DevRecoveryKey = recoveryKey - opts, err := getOpts(opt...) - if err != nil { - return nil, fmt.Errorf("error parsing options: %w", err) - } + opts := getOpts(opt...) parsed.Eventing.AuditEnabled = opts.withAuditEventsEnabled parsed.Eventing.ObservationsEnabled = opts.withObservationsEnabled parsed.Eventing.SysEventsEnabled = opts.withSysEventsEnabled diff --git a/internal/cmd/config/options.go b/internal/cmd/config/options.go index aed401b7a3e..2a50d8da608 100644 --- a/internal/cmd/config/options.go +++ b/internal/cmd/config/options.go @@ -5,30 +5,19 @@ package config import ( "os" "testing" - - "github.com/hashicorp/go-secure-stdlib/parseutil" ) // getOpts - iterate the inbound Options and return a struct -func getOpts(opt ...Option) (options, error) { - opts, err := getDefaultOptions() - if err != nil { - return opts, err - } +func getOpts(opt ...Option) options { + opts := getDefaultOptions() for _, o := range opt { - if o == nil { - continue - } - err = o(&opts) - if err != nil { - return opts, err - } + o(&opts) } - return opts, nil + return opts } // Option - how Options are passed as arguments -type Option func(*options) error +type Option func(*options) // options = how options are represented type options struct { @@ -38,65 +27,50 @@ type options struct { testWithErrorEventsEnabled bool } -func getDefaultOptions() (options, error) { +func getDefaultOptions() options { opts := options{} - sysEvents, err := parseutil.ParseBool(os.Getenv("BOUNDARY_ENABLE_TEST_SYS_EVENTS")) - if err != nil { - return opts, err + if os.Getenv("BOUNDARY_ENABLE_TEST_SYS_EVENTS") != "" { + opts.withSysEventsEnabled = true } - opts.withSysEventsEnabled = sysEvents - - auditEvents, err := parseutil.ParseBool(os.Getenv("BOUNDARY_ENABLE_TEST_AUDIT_EVENTS")) - if err != nil { - return opts, err + if os.Getenv("BOUNDARY_ENABLE_TEST_AUDIT_EVENTS") != "" { + opts.withAuditEventsEnabled = true } - opts.withAuditEventsEnabled = auditEvents - - obs, err := parseutil.ParseBool(os.Getenv("BOUNDARY_ENABLE_TEST_OBSERVATIONS")) - if err != nil { - return opts, err + if os.Getenv("BOUNDARY_ENABLE_TEST_OBSERVATIONS") != "" { + opts.withObservationsEnabled = true } - opts.withObservationsEnabled = obs - - errEvents, err := parseutil.ParseBool(os.Getenv("BOUNDARY_ENABLE_TEST_ERROR_EVENTS")) - if err != nil { - return opts, err + if os.Getenv("BOUNDARY_ENABLE_TEST_ERROR_EVENTS") != "" { + opts.testWithErrorEventsEnabled = true } - opts.testWithErrorEventsEnabled = errEvents - return opts, nil + return opts } // WithSysEventsEnabled provides an option for enabling system events func WithSysEventsEnabled(enable bool) Option { - return func(o *options) error { + return func(o *options) { o.withSysEventsEnabled = enable - return nil } } // WithAuditEventsEnabled provides an option for enabling audit events func WithAuditEventsEnabled(enable bool) Option { - return func(o *options) error { + return func(o *options) { o.withAuditEventsEnabled = enable - return nil } } // WithObservationsEnabled provides an option for enabling observation events func WithObservationsEnabled(enable bool) Option { - return func(o *options) error { + return func(o *options) { o.withObservationsEnabled = enable - return nil 
} } // TestWithErrorEventsEnabled provides an option for enabling error events // during tests. func TestWithErrorEventsEnabled(_ testing.TB, enable bool) Option { - return func(o *options) error { + return func(o *options) { o.testWithErrorEventsEnabled = enable - return nil } } diff --git a/internal/cmd/gencli/input.go b/internal/cmd/gencli/input.go index 767e5c2bc0b..9e9ada40cbd 100644 --- a/internal/cmd/gencli/input.go +++ b/internal/cmd/gencli/input.go @@ -204,11 +204,10 @@ var inputStructs = map[string][]*cmdInfo{ }, "authtokens": { { - ResourceType: resource.AuthToken.String(), - Pkg: "authtokens", - StdActions: []string{"read", "delete", "list"}, - HasExtraHelpFunc: true, - Container: "Scope", + ResourceType: resource.AuthToken.String(), + Pkg: "authtokens", + StdActions: []string{"read", "delete", "list"}, + Container: "Scope", }, }, "credentialstores": { diff --git a/internal/cmd/ops/server.go b/internal/cmd/ops/server.go index 1444c943f19..300acc7e4c8 100644 --- a/internal/cmd/ops/server.go +++ b/internal/cmd/ops/server.go @@ -40,7 +40,7 @@ type opsBundle struct { // NewServer iterates through all the listeners and sets up HTTP Servers for each, along with individual handlers. // If Controller is set-up, NewServer will set-up a health endpoint for it. -func NewServer(ctx context.Context, l hclog.Logger, c *controller.Controller, w *worker.Worker, listeners ...*base.ServerListener) (*Server, error) { +func NewServer(l hclog.Logger, c *controller.Controller, w *worker.Worker, listeners ...*base.ServerListener) (*Server, error) { const op = "ops.NewServer()" if l == nil { return nil, fmt.Errorf("%s: missing logger", op) @@ -58,7 +58,7 @@ func NewServer(ctx context.Context, l hclog.Logger, c *controller.Controller, w return nil, fmt.Errorf("%s: missing ops listener", op) } - h, err := createOpsHandler(ctx, ln.Config, c, w) + h, err := createOpsHandler(ln.Config, c, w) if err != nil { return nil, err } @@ -132,7 +132,7 @@ func (s *Server) WaitIfHealthExists(d time.Duration, ui cli.Ui) { <-time.After(d) } -func createOpsHandler(ctx context.Context, lncfg *listenerutil.ListenerConfig, c *controller.Controller, w *worker.Worker) (http.Handler, error) { +func createOpsHandler(lncfg *listenerutil.ListenerConfig, c *controller.Controller, w *worker.Worker) (http.Handler, error) { mux := http.NewServeMux() var h http.Handler var err error diff --git a/internal/cmd/ops/server_test.go b/internal/cmd/ops/server_test.go index 6006cec319f..81063bd28ac 100644 --- a/internal/cmd/ops/server_test.go +++ b/internal/cmd/ops/server_test.go @@ -107,7 +107,7 @@ func TestNewServer(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - s, err := NewServer(context.Background(), tt.logger, tt.c, tt.w, tt.listeners...) + s, err := NewServer(tt.logger, tt.c, tt.w, tt.listeners...) if tt.expErr { require.EqualError(t, err, tt.expErrMsg) require.Nil(t, s) @@ -279,7 +279,7 @@ func TestNewServerIntegration(t *testing.T) { err := bs.SetupListeners(nil, &configutil.SharedConfig{Listeners: tt.listeners}, []string{"ops"}) require.NoError(t, err) - s, err := NewServer(context.Background(), hclog.Default(), nil, nil, bs.Listeners...) + s, err := NewServer(hclog.Default(), nil, nil, bs.Listeners...) if tt.expErr { require.EqualError(t, err, tt.expErrMsg) require.Nil(t, s) @@ -575,7 +575,7 @@ func TestHealthEndpointLifecycle(t *testing.T) { require.NoError(t, err) // Controller has started and is set onto our Command object, start ops. 
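A note on the internal/cmd/config option plumbing changed above: Option drops its error return, getOpts can no longer fail, and the BOUNDARY_ENABLE_TEST_* variables are now treated as presence flags, so any non-empty value (including the string "false") enables the corresponding events. Below is a minimal, self-contained sketch of the simplified functional-options shape, reusing only names visible in the hunks; it is an illustration, not the package itself.

package main

import (
	"fmt"
	"os"
)

// Option mutates the options struct directly and can no longer fail.
type Option func(*options)

type options struct {
	withSysEventsEnabled bool
}

// getDefaultOptions treats presence of the variable as "enabled"; note that
// BOUNDARY_ENABLE_TEST_SYS_EVENTS=false is non-empty and therefore enables it too.
func getDefaultOptions() options {
	opts := options{}
	if os.Getenv("BOUNDARY_ENABLE_TEST_SYS_EVENTS") != "" {
		opts.withSysEventsEnabled = true
	}
	return opts
}

// getOpts applies each inbound Option and returns the struct by value.
func getOpts(opt ...Option) options {
	opts := getDefaultOptions()
	for _, o := range opt {
		o(&opts)
	}
	return opts
}

// WithSysEventsEnabled overrides whatever the environment selected.
func WithSysEventsEnabled(enable bool) Option {
	return func(o *options) { o.withSysEventsEnabled = enable }
}

func main() {
	fmt.Println(getOpts(WithSysEventsEnabled(true)).withSysEventsEnabled) // true
}

One consequence of the new getOpts is that a nil Option now panics instead of being skipped, since the nil check in the loop is gone.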
- opsServer, err := NewServer(tc.Context(), hclog.Default(), tc.Controller(), nil, tc.Config().Listeners...) + opsServer, err := NewServer(hclog.Default(), tc.Controller(), nil, tc.Config().Listeners...) require.NoError(t, err) opsServer.Start() @@ -665,7 +665,6 @@ func TestWaitIfHealthExists(t *testing.T) { } func TestCreateOpsHandler(t *testing.T) { - ctx := context.Background() tests := []struct { name string setupController bool @@ -776,7 +775,7 @@ func TestCreateOpsHandler(t *testing.T) { w = tc.Worker() } - h, err := createOpsHandler(ctx, tt.lncfg, c, w) + h, err := createOpsHandler(tt.lncfg, c, w) if tt.expErr { require.EqualError(t, err, tt.expErrMsg) require.Nil(t, h) @@ -791,7 +790,7 @@ func TestCreateOpsHandler(t *testing.T) { go s.Serve(l) t.Cleanup(func() { - require.NoError(t, s.Shutdown(ctx)) + require.NoError(t, s.Shutdown(context.Background())) }) tt.assertions(t, l.Addr().String()) diff --git a/internal/credential/public_ids.go b/internal/credential/public_ids.go index 64ec5ac996d..5626bd2215a 100644 --- a/internal/credential/public_ids.go +++ b/internal/credential/public_ids.go @@ -33,7 +33,7 @@ const ( ) func NewUsernamePasswordCredentialId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.UsernamePasswordCredentialPrefix) + id, err := db.NewPublicId(globals.UsernamePasswordCredentialPrefix) if err != nil { return "", errors.Wrap(ctx, err, "credential.NewUsernamePasswordCredentialId") } @@ -41,7 +41,7 @@ func NewUsernamePasswordCredentialId(ctx context.Context) (string, error) { } func NewSshPrivateKeyCredentialId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.SshPrivateKeyCredentialPrefix) + id, err := db.NewPublicId(globals.SshPrivateKeyCredentialPrefix) if err != nil { return "", errors.Wrap(ctx, err, "credential.NewSshPrivateKeyCredentialId") } @@ -49,7 +49,7 @@ func NewSshPrivateKeyCredentialId(ctx context.Context) (string, error) { } func NewJsonCredentialId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.JsonCredentialPrefix) + id, err := db.NewPublicId(globals.JsonCredentialPrefix) if err != nil { return "", errors.Wrap(ctx, err, "credential.NewJsonCredentialId") } diff --git a/internal/credential/static/public_ids.go b/internal/credential/static/public_ids.go index 73d29db722b..723244a7f7b 100644 --- a/internal/credential/static/public_ids.go +++ b/internal/credential/static/public_ids.go @@ -25,7 +25,7 @@ const ( ) func newCredentialStoreId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.StaticCredentialStorePrefix) + id, err := db.NewPublicId(globals.StaticCredentialStorePrefix) if err != nil { return "", errors.Wrap(ctx, err, "static.newCredentialStoreId") } diff --git a/internal/credential/vault/client_certificate.go b/internal/credential/vault/client_certificate.go index 4d814aabc14..476ae61b653 100644 --- a/internal/credential/vault/client_certificate.go +++ b/internal/credential/vault/client_certificate.go @@ -25,10 +25,10 @@ type ClientCertificate struct { } // NewClientCertificate creates a new in memory ClientCertificate. 
-func NewClientCertificate(ctx context.Context, certificate []byte, key KeySecret) (*ClientCertificate, error) { +func NewClientCertificate(certificate []byte, key KeySecret) (*ClientCertificate, error) { const op = "vault.NewClientCertificate" if len(certificate) == 0 { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no certificate") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no certificate") } certificateCopy := make([]byte, len(certificate)) diff --git a/internal/credential/vault/client_certificate_test.go b/internal/credential/vault/client_certificate_test.go index 3d947cdc626..d0928ddb166 100644 --- a/internal/credential/vault/client_certificate_test.go +++ b/internal/credential/vault/client_certificate_test.go @@ -106,7 +106,7 @@ func TestClientCertificate_New(t *testing.T) { require.NoError(err) require.NotNil(databaseWrapper) - got, err := NewClientCertificate(ctx, tt.args.certificate, tt.args.key) + got, err := NewClientCertificate(tt.args.certificate, tt.args.key) if tt.wantErr { assert.Error(err) require.Nil(got) diff --git a/internal/credential/vault/credential.go b/internal/credential/vault/credential.go index 28522205e91..1127c59081c 100644 --- a/internal/credential/vault/credential.go +++ b/internal/credential/vault/credential.go @@ -4,7 +4,6 @@ package vault import ( - "context" "time" "github.com/hashicorp/boundary/internal/credential/vault/store" @@ -48,13 +47,13 @@ type Credential struct { expiration time.Duration `gorm:"-"` } -func newCredential(ctx context.Context, libraryId, externalId string, tokenHmac []byte, expiration time.Duration) (*Credential, error) { +func newCredential(libraryId, externalId string, tokenHmac []byte, expiration time.Duration) (*Credential, error) { const op = "vault.newCredential" if libraryId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no library id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no library id") } if len(tokenHmac) == 0 { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no tokenHmac") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no tokenHmac") } status := string(ActiveCredential) diff --git a/internal/credential/vault/credential_library_test.go b/internal/credential/vault/credential_library_test.go index f6673fbcc77..c55701cb0ad 100644 --- a/internal/credential/vault/credential_library_test.go +++ b/internal/credential/vault/credential_library_test.go @@ -302,7 +302,7 @@ func TestCredentialLibrary_New(t *testing.T) { assert.Failf("Unknown credential type", "%s", ct) } - id, err := newCredentialLibraryId(ctx) + id, err := newCredentialLibraryId() assert.NoError(err) tt.want.PublicId = id diff --git a/internal/credential/vault/credential_store_test.go b/internal/credential/vault/credential_store_test.go index e70e74de144..352fca96ed3 100644 --- a/internal/credential/vault/credential_store_test.go +++ b/internal/credential/vault/credential_store_test.go @@ -18,7 +18,6 @@ import ( func TestCredentialStore_New(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) rw := db.New(conn) @@ -26,7 +25,7 @@ func TestCredentialStore_New(t *testing.T) { _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) inCert := testClientCert(t, testCaCert(t)) - clientCert, err := NewClientCertificate(ctx, inCert.Cert.Cert, inCert.Cert.Key) + clientCert, err := NewClientCertificate(inCert.Cert.Cert, inCert.Cert.Key) require.NoError(t, err) require.NotNil(t, 
clientCert) @@ -254,13 +253,13 @@ func TestCredentialStore_New(t *testing.T) { assert.Equal(tt.want, got) assert.Empty(cmp.Diff(tt.want, got.clone(), protocmp.Transform())) - id, err := newCredentialStoreId(ctx) + id, err := newCredentialStoreId() assert.NoError(err) tt.want.PublicId = id got.PublicId = id - err2 := rw.Create(ctx, got) + err2 := rw.Create(context.Background(), got) if tt.wantCreateErr { assert.Error(err2) } else { diff --git a/internal/credential/vault/credential_test.go b/internal/credential/vault/credential_test.go index fc667dc0705..bb2d8e32cfe 100644 --- a/internal/credential/vault/credential_test.go +++ b/internal/credential/vault/credential_test.go @@ -136,7 +136,7 @@ func TestCredential_New(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() - got, err := newCredential(ctx, tt.args.libraryId, + got, err := newCredential(tt.args.libraryId, tt.args.externalId, tt.args.tokenHmac, tt.args.expiration) if tt.wantErr { assert.Error(err) @@ -148,7 +148,7 @@ func TestCredential_New(t *testing.T) { assert.Emptyf(got.PublicId, "PublicId set") - id, err := newCredentialId(ctx) + id, err := newCredentialId() assert.NoError(err) tt.want.PublicId = id diff --git a/internal/credential/vault/jobs.go b/internal/credential/vault/jobs.go index ca69399603f..9cc6f6c8046 100644 --- a/internal/credential/vault/jobs.go +++ b/internal/credential/vault/jobs.go @@ -32,42 +32,42 @@ const ( func RegisterJobs(ctx context.Context, scheduler *scheduler.Scheduler, r db.Reader, w db.Writer, kms *kms.Kms) error { const op = "vault.RegisterJobs" - tokenRenewal, err := newTokenRenewalJob(ctx, r, w, kms) + tokenRenewal, err := newTokenRenewalJob(r, w, kms) if err != nil { return errors.Wrap(ctx, err, op) } if err = scheduler.RegisterJob(ctx, tokenRenewal); err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("token renewal job")) } - tokenRevoke, err := newTokenRevocationJob(ctx, r, w, kms) + tokenRevoke, err := newTokenRevocationJob(r, w, kms) if err != nil { return errors.Wrap(ctx, err, op) } if err = scheduler.RegisterJob(ctx, tokenRevoke); err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("token revocation job")) } - credRenewal, err := newCredentialRenewalJob(ctx, r, w, kms) + credRenewal, err := newCredentialRenewalJob(r, w, kms) if err != nil { return errors.Wrap(ctx, err, op) } if err = scheduler.RegisterJob(ctx, credRenewal); err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("credential renewal job")) } - credRevoke, err := newCredentialRevocationJob(ctx, r, w, kms) + credRevoke, err := newCredentialRevocationJob(r, w, kms) if err != nil { return errors.Wrap(ctx, err, op) } if err = scheduler.RegisterJob(ctx, credRevoke); err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("credential revocation job")) } - credStoreCleanup, err := newCredentialStoreCleanupJob(ctx, r, w, kms) + credStoreCleanup, err := newCredentialStoreCleanupJob(r, w, kms) if err != nil { return errors.Wrap(ctx, err, op) } if err = scheduler.RegisterJob(ctx, credStoreCleanup); err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("credential store cleanup job")) } - credCleanup, err := newCredentialCleanupJob(ctx, w) + credCleanup, err := newCredentialCleanupJob(w) if err != nil { return errors.Wrap(ctx, err, op) } @@ -94,15 +94,15 @@ type TokenRenewalJob struct { // newTokenRenewalJob creates a new in-memory TokenRenewalJob. // // WithLimit is the only supported option. 
-func newTokenRenewalJob(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*TokenRenewalJob, error) { +func newTokenRenewalJob(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*TokenRenewalJob, error) { const op = "vault.newTokenRenewalJob" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing kms") } opts := getOpts(opt...) @@ -243,7 +243,7 @@ func (r *TokenRenewalJob) NextRunIn(ctx context.Context) (time.Duration, error) const op = "vault.(TokenRenewalJob).NextRunIn" next, err := nextRenewal(ctx, r) if err != nil { - return defaultNextRunIn, errors.Wrap(ctx, err, op) + return defaultNextRunIn, errors.WrapDeprecated(err, op) } return next, nil @@ -261,12 +261,12 @@ func nextRenewal(ctx context.Context, j scheduler.Job) (time.Duration, error) { query = credentialRenewalNextRunInQuery r = job.reader default: - return 0, errors.New(ctx, errors.Unknown, op, "unknown job") + return 0, errors.NewDeprecated(errors.Unknown, op, "unknown job") } rows, err := r.Query(context.Background(), query, nil) if err != nil { - return 0, errors.Wrap(ctx, err, op) + return 0, errors.WrapDeprecated(err, op) } defer rows.Close() @@ -277,7 +277,7 @@ func nextRenewal(ctx context.Context, j scheduler.Job) (time.Duration, error) { var n NextRenewal err = r.ScanRows(ctx, rows, &n) if err != nil { - return 0, errors.Wrap(ctx, err, op) + return 0, errors.WrapDeprecated(err, op) } if n.RenewalIn < 0 { // If we are past the next renewal time, return 0 to schedule immediately @@ -317,15 +317,15 @@ type TokenRevocationJob struct { // newTokenRevocationJob creates a new in-memory TokenRevocationJob. // // WithLimit is the only supported option. -func newTokenRevocationJob(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*TokenRevocationJob, error) { +func newTokenRevocationJob(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*TokenRevocationJob, error) { const op = "vault.newTokenRevocationJob" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing kms") } opts := getOpts(opt...) @@ -482,15 +482,15 @@ type CredentialRenewalJob struct { // newCredentialRenewalJob creates a new in-memory CredentialRenewalJob. // // WithLimit is the only supported option. 
-func newCredentialRenewalJob(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*CredentialRenewalJob, error) { +func newCredentialRenewalJob(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*CredentialRenewalJob, error) { const op = "vault.newCredentialRenewalJob" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing kms") } opts := getOpts(opt...) @@ -611,7 +611,7 @@ func (r *CredentialRenewalJob) NextRunIn(ctx context.Context) (time.Duration, er const op = "vault.(CredentialRenewalJob).NextRunIn" next, err := nextRenewal(ctx, r) if err != nil { - return defaultNextRunIn, errors.Wrap(ctx, err, op) + return defaultNextRunIn, errors.WrapDeprecated(err, op) } return next, nil @@ -645,15 +645,15 @@ type CredentialRevocationJob struct { // newCredentialRevocationJob creates a new in-memory CredentialRevocationJob. // // WithLimit is the only supported option. -func newCredentialRevocationJob(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*CredentialRevocationJob, error) { +func newCredentialRevocationJob(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*CredentialRevocationJob, error) { const op = "vault.newCredentialRevocationJob" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing kms") } opts := getOpts(opt...) @@ -787,15 +787,15 @@ type CredentialStoreCleanupJob struct { // newCredentialStoreCleanupJob creates a new in-memory CredentialStoreCleanupJob. // // No options are supported. -func newCredentialStoreCleanupJob(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*CredentialStoreCleanupJob, error) { +func newCredentialStoreCleanupJob(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*CredentialStoreCleanupJob, error) { const op = "vault.newCredentialStoreCleanupJob" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing kms") } opts := getOpts(opt...) @@ -898,10 +898,10 @@ type CredentialCleanupJob struct { // newCredentialCleanupJob creates a new in-memory CredentialCleanupJob. // // No options are supported. 
-func newCredentialCleanupJob(ctx context.Context, w db.Writer) (*CredentialCleanupJob, error) { +func newCredentialCleanupJob(w db.Writer) (*CredentialCleanupJob, error) { const op = "vault.newCredentialCleanupJob" if w == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db.Writer") } return &CredentialCleanupJob{ diff --git a/internal/credential/vault/jobs_test.go b/internal/credential/vault/jobs_test.go index 663cd528bfc..bff09c3b03f 100644 --- a/internal/credential/vault/jobs_test.go +++ b/internal/credential/vault/jobs_test.go @@ -49,7 +49,7 @@ func testVaultToken(t *testing.T, rw := db.New(conn) secret, _ := v.CreateToken(t) - inToken, err := newToken(context.Background(), cs.PublicId, []byte(secret.Auth.ClientToken), []byte(secret.Auth.Accessor), expiration) + inToken, err := newToken(cs.PublicId, []byte(secret.Auth.ClientToken), []byte(secret.Auth.Accessor), expiration) require.NoError(err) inToken.Status = string(status) @@ -116,7 +116,7 @@ func testVaultCred(t *testing.T, require.NoError(err) require.NotNil(secret) - id, err := newCredentialId(ctx) + id, err := newCredentialId() require.NoError(err) query := insertCredentialWithExpirationQuery @@ -219,7 +219,7 @@ func TestNewTokenRenewalJob(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := newTokenRenewalJob(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.options...) + got, err := newTokenRenewalJob(tt.args.r, tt.args.w, tt.args.kms, tt.options...) if tt.wantErr { require.Error(err) assert.Nil(got) @@ -238,7 +238,6 @@ func TestNewTokenRenewalJob(t *testing.T) { func TestTokenRenewalJob_RunLimits(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -281,11 +280,11 @@ func TestTokenRenewalJob_RunLimits(t *testing.T) { _, token := v.CreateToken(t) in, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(token)) require.NoError(err) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) - err = RegisterJobs(ctx, sche, rw, rw, kmsCache) + err = RegisterJobs(context.Background(), sche, rw, rw, kmsCache) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, in) + cs, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(err) // Create additional tokens and alternative between token statuses, revoked and @@ -304,19 +303,19 @@ func TestTokenRenewalJob_RunLimits(t *testing.T) { } // inserting new tokens moves the current token to a maintaining state, move it back to current and set expiration time - numRows, err := rw.Exec(ctx, testUpdateTokenStatusExpirationQuery, []any{CurrentToken, time.Minute.Seconds(), cs.outputToken.TokenHmac}) + numRows, err := rw.Exec(context.Background(), testUpdateTokenStatusExpirationQuery, []any{CurrentToken, time.Minute.Seconds(), cs.outputToken.TokenHmac}) require.NoError(err) assert.Equal(1, numRows) - r, err := newTokenRenewalJob(ctx, rw, rw, kmsCache, tt.opts...) + r, err := newTokenRenewalJob(rw, rw, kmsCache, tt.opts...) 
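The vault job constructors above (newTokenRenewalJob, newTokenRevocationJob, and friends) keep the same guard-clause shape after losing their context parameter: validate every dependency up front, return an error annotated with the operation name, and only then build the job. A rough, self-contained sketch of that shape using only the standard library; the reader/writer interfaces and the sentinel error are hypothetical stand-ins for Boundary's internal db and errors packages.

package main

import (
	"errors"
	"fmt"
)

// errInvalidParameter stands in for the InvalidParameter error code used above.
var errInvalidParameter = errors.New("invalid parameter")

// reader and writer are hypothetical stand-ins for db.Reader and db.Writer.
type reader interface{ Read(q string) error }
type writer interface{ Write(q string) error }

type tokenRenewalJob struct {
	r reader
	w writer
}

// newTokenRenewalJob mirrors the guard-clause style of the constructors above:
// check each dependency, wrap the failure with the operation name, no context needed.
func newTokenRenewalJob(r reader, w writer) (*tokenRenewalJob, error) {
	const op = "vault.newTokenRenewalJob"
	switch {
	case r == nil:
		return nil, fmt.Errorf("%s: %w: missing db.Reader", op, errInvalidParameter)
	case w == nil:
		return nil, fmt.Errorf("%s: %w: missing db.Writer", op, errInvalidParameter)
	}
	return &tokenRenewalJob{r: r, w: w}, nil
}

func main() {
	if _, err := newTokenRenewalJob(nil, nil); err != nil {
		fmt.Println(err) // vault.newTokenRenewalJob: invalid parameter: missing db.Reader
	}
}

Because these constructors no longer receive a context, the hunks switch from the context-taking errors.New and errors.Wrap helpers to the NewDeprecated and WrapDeprecated variants shown above.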
require.NoError(err) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(tt.wantLen, r.numTokens) // Set all tokens to revoked for next test - _, err = rw.Exec(ctx, "update credential_vault_token set status = 'revoked'", nil) + _, err = rw.Exec(context.Background(), "update credential_vault_token set status = 'revoked'", nil) assert.NoError(err) }) } @@ -324,7 +323,6 @@ func TestTokenRenewalJob_RunLimits(t *testing.T) { func TestTokenRenewalJob_Run(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") @@ -341,17 +339,17 @@ func TestTokenRenewalJob_Run(t *testing.T) { require.NoError(err) sche := scheduler.TestScheduler(t, conn, wrapper) - r, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + r, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, r) + err = sche.RegisterJob(context.Background(), r) require.NoError(err) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, in) + cs, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(err) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) // No tokens should have been renewed since token expiration is 24 hours by default assert.Equal(0, r.numProcessed) @@ -362,15 +360,15 @@ func TestTokenRenewalJob_Run(t *testing.T) { expiredToken := testVaultToken(t, conn, wrapper, v, cs, ExpiredToken, time.Minute) // inserting new tokens moves the current token to a maintaining state, move it back to current and set expiration time - count, err := rw.Exec(ctx, testUpdateTokenStatusExpirationQuery, []any{CurrentToken, time.Minute.Seconds(), cs.outputToken.TokenHmac}) + count, err := rw.Exec(context.Background(), testUpdateTokenStatusExpirationQuery, []any{CurrentToken, time.Minute.Seconds(), cs.outputToken.TokenHmac}) require.NoError(err) assert.Equal(1, count) currentToken := allocToken() - require.NoError(rw.LookupWhere(ctx, &currentToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) - databaseWrapper, err := kmsCache.GetWrapper(ctx, cs.ProjectId, kms.KeyPurposeDatabase) + require.NoError(rw.LookupWhere(context.Background(), &currentToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) + databaseWrapper, err := kmsCache.GetWrapper(context.Background(), cs.ProjectId, kms.KeyPurposeDatabase) require.NoError(err) - require.NoError(currentToken.decrypt(ctx, databaseWrapper)) + require.NoError(currentToken.decrypt(context.Background(), databaseWrapper)) // Sleep to move clock time.Sleep(time.Second * 2) @@ -390,7 +388,7 @@ func TestTokenRenewalJob_Run(t *testing.T) { require.NoError(err) // Run token renewal again - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) // Current and maintaining token should have been processed assert.Equal(2, r.numProcessed) @@ -417,24 +415,23 @@ func TestTokenRenewalJob_Run(t *testing.T) { // Verify current and maintaining tokens were renewed in repo repoToken := allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{currentToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{currentToken.TokenHmac})) assert.True(currentToken.GetExpirationTime().AsTime().Before(repoToken.GetExpirationTime().AsTime())) repoToken = allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, 
"token_hmac = ?", []any{maintainToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{maintainToken.TokenHmac})) assert.True(maintainToken.GetExpirationTime().AsTime().Before(repoToken.GetExpirationTime().AsTime())) // Verify revoked and expired tokens were not renewed in the repo repoToken = allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{revokedToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{revokedToken.TokenHmac})) assert.Equal(revokedToken.GetExpirationTime().AsTime(), repoToken.GetExpirationTime().AsTime()) repoToken = allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{expiredToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{expiredToken.TokenHmac})) assert.Equal(expiredToken.GetExpirationTime().AsTime(), repoToken.GetExpirationTime().AsTime()) } func TestTokenRenewalJob_RunExpired(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") @@ -452,28 +449,28 @@ func TestTokenRenewalJob_RunExpired(t *testing.T) { assert.NoError(err) require.NotNil(in) - r, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + r, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, r) + err = sche.RegisterJob(context.Background(), r) require.NoError(err) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, in) + cs, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(err) // Sleep to move clock and expire token time.Sleep(time.Second * 2) // Token should have expired in vault, run should now expire in repo - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(1, r.numTokens) // Verify token was expired in repo token := allocToken() - require.NoError(rw.LookupWhere(ctx, &token, "store_id = ?", []any{cs.GetPublicId()})) + require.NoError(rw.LookupWhere(context.Background(), &token, "store_id = ?", []any{cs.GetPublicId()})) assert.Equal(string(ExpiredToken), token.Status) // Updating the credential store with a token that will expire before the job scheduler can run should return an error @@ -482,7 +479,7 @@ func TestTokenRenewalJob_RunExpired(t *testing.T) { assert.NoError(err) require.NotNil(in) - cs, _, err = repo.UpdateCredentialStore(ctx, in, cs.Version+1, []string{"Token"}) + cs, _, err = repo.UpdateCredentialStore(context.Background(), in, cs.Version+1, []string{"Token"}) assert.Error(err) assert.Nil(cs) @@ -493,7 +490,7 @@ func TestTokenRenewalJob_RunExpired(t *testing.T) { require.NotNil(in) // Should return error because token ttl expires before the run job scheduler interval - cs, err = repo.CreateCredentialStore(ctx, in) + cs, err = repo.CreateCredentialStore(context.Background(), in) require.Error(err) require.Nil(cs) } @@ -501,8 +498,6 @@ func TestTokenRenewalJob_RunExpired(t *testing.T) { func TestTokenRenewalJob_NextRunIn(t *testing.T) { t.Parallel() - ctx := context.Background() - conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -589,7 +584,7 @@ func TestTokenRenewalJob_NextRunIn(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), 
require.New(t) - r, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + r, err := newTokenRenewalJob(rw, rw, kmsCache) assert.NoError(err) require.NotNil(r) @@ -597,11 +592,11 @@ func TestTokenRenewalJob_NextRunIn(t *testing.T) { _, token := v.CreateToken(t) in, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(token)) require.NoError(err) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) - err = RegisterJobs(ctx, sche, rw, rw, kmsCache) + err = RegisterJobs(context.Background(), sche, rw, rw, kmsCache) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, in) + cs, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(err) for _, token := range tt.tokens { @@ -609,18 +604,18 @@ func TestTokenRenewalJob_NextRunIn(t *testing.T) { } // inserting new tokens moves the current token to a maintaining state, move it back to current and set expiration time - count, err := rw.Exec(ctx, testUpdateTokenStatusExpirationQuery, []any{CurrentToken, tt.currentTokenExp.Seconds(), cs.outputToken.TokenHmac}) + count, err := rw.Exec(context.Background(), testUpdateTokenStatusExpirationQuery, []any{CurrentToken, tt.currentTokenExp.Seconds(), cs.outputToken.TokenHmac}) require.NoError(err) assert.Equal(1, count) } - got, err := r.NextRunIn(ctx) + got, err := r.NextRunIn(context.Background()) require.NoError(err) // Round to time.Minute to account for lost time between creating tokens and determining next run assert.Equal(tt.want.Round(time.Minute), got.Round(time.Minute)) // Set all tokens to revoked for next test - _, err = rw.Exec(ctx, "update credential_vault_token set status = 'revoked'", nil) + _, err = rw.Exec(context.Background(), "update credential_vault_token set status = 'revoked'", nil) assert.NoError(err) }) } @@ -693,7 +688,7 @@ func TestNewTokenRevocationJob(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := newTokenRevocationJob(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.options...) + got, err := newTokenRevocationJob(tt.args.r, tt.args.w, tt.args.kms, tt.options...) 
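The NextRunIn assertions above compare durations only after rounding to the nearest minute, so the wall-clock time spent creating test tokens cannot make the expected and actual values drift apart. time.Duration.Round rounds to the nearest multiple of the given unit; a small standard-library-only illustration of why a few seconds of setup time disappears after rounding:

package main

import (
	"fmt"
	"time"
)

func main() {
	want := 10 * time.Minute
	// Pretend roughly 7 seconds elapsed between creating the token and asking
	// the job for its next run time.
	got := 10*time.Minute - 7*time.Second

	fmt.Println(got)                                               // 9m53s
	fmt.Println(want.Round(time.Minute) == got.Round(time.Minute)) // true
}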
if tt.wantErr { require.Error(err) assert.Nil(got) @@ -713,8 +708,6 @@ func TestNewTokenRevocationJob(t *testing.T) { func TestTokenRevocationJob_RunLimits(t *testing.T) { t.Parallel() - ctx := context.Background() - conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -758,12 +751,12 @@ func TestTokenRevocationJob_RunLimits(t *testing.T) { _, token := v.CreateToken(t) in, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(token)) require.NoError(err) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) - err = RegisterJobs(ctx, sche, rw, rw, kmsCache) + err = RegisterJobs(context.Background(), sche, rw, rw, kmsCache) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, in) + cs, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(err) for i := 0; i < count*3; i++ { @@ -782,19 +775,19 @@ func TestTokenRevocationJob_RunLimits(t *testing.T) { } // inserting new tokens moves the current token to a maintaining state, move it back to current and set expiration time - numRows, err := rw.Exec(ctx, testUpdateTokenStatusExpirationQuery, []any{CurrentToken, time.Minute.Seconds(), cs.outputToken.TokenHmac}) + numRows, err := rw.Exec(context.Background(), testUpdateTokenStatusExpirationQuery, []any{CurrentToken, time.Minute.Seconds(), cs.outputToken.TokenHmac}) require.NoError(err) assert.Equal(1, numRows) - r, err := newTokenRevocationJob(ctx, rw, rw, kmsCache, tt.opts...) + r, err := newTokenRevocationJob(rw, rw, kmsCache, tt.opts...) require.NoError(err) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(tt.wantLen, r.numTokens) // Set all tokens to revoked for next test - _, err = rw.Exec(ctx, "update credential_vault_token set status = 'revoked'", nil) + _, err = rw.Exec(context.Background(), "update credential_vault_token set status = 'revoked'", nil) require.NoError(err) }) } @@ -802,7 +795,6 @@ func TestTokenRevocationJob_RunLimits(t *testing.T) { func TestTokenRevocationJob_Run(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") @@ -817,25 +809,25 @@ func TestTokenRevocationJob_Run(t *testing.T) { in, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(ct)) require.NoError(err) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, j) + err = sche.RegisterJob(context.Background(), j) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, in) + cs, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(err) - r, err := newTokenRevocationJob(ctx, rw, rw, kmsCache) + r, err := newTokenRevocationJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, r) + err = sche.RegisterJob(context.Background(), r) require.NoError(err) // No tokens should have been revoked since only the current token exists - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(0, r.numProcessed) @@ -845,7 +837,7 @@ func TestTokenRevocationJob_Run(t *testing.T) { revokeToken := testVaultToken(t, conn, wrapper, v, cs, RevokeToken, 5*time.Minute) // inserting new tokens 
moves the current token to a maintaining state, move it back to current and set expiration time - count, err := rw.Exec(ctx, testUpdateTokenStatusExpirationQuery, []any{CurrentToken, (5 * time.Minute).Seconds(), cs.outputToken.TokenHmac}) + count, err := rw.Exec(context.Background(), testUpdateTokenStatusExpirationQuery, []any{CurrentToken, (5 * time.Minute).Seconds(), cs.outputToken.TokenHmac}) require.NoError(err) assert.Equal(1, count) @@ -853,9 +845,9 @@ func TestTokenRevocationJob_Run(t *testing.T) { libPath := path.Join("database", "creds", "opened") cl, err := NewCredentialLibrary(cs.PublicId, libPath, WithMethod(MethodGet)) require.NoError(err) - cl.PublicId, err = newCredentialLibraryId(ctx) + cl.PublicId, err = newCredentialLibraryId() require.NoError(err) - err = rw.Create(ctx, cl) + err = rw.Create(context.Background(), cl) require.NoError(err) at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId()) @@ -864,7 +856,7 @@ func TestTokenRevocationJob_Run(t *testing.T) { hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] static.TestSetMembers(t, conn, hs.GetPublicId(), []*static.Host{h}) - tar := tcp.TestTarget(ctx, t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) + tar := tcp.TestTarget(context.Background(), t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) target.TestCredentialLibrary(t, conn, tar.GetPublicId(), cl.GetPublicId()) sess := session.TestSession(t, conn, wrapper, session.ComposedOf{ UserId: uId, @@ -884,7 +876,7 @@ func TestTokenRevocationJob_Run(t *testing.T) { // Running should revoke noCredsToken and the revokeToken even though it has active // credentials it has been marked for revocation - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(2, r.numProcessed) @@ -893,7 +885,7 @@ func TestTokenRevocationJob_Run(t *testing.T) { // Verify noCredsToken was set to revoked in repo repoToken := allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{noCredsToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{noCredsToken.TokenHmac})) assert.Equal(string(RevokedToken), repoToken.Status) // Verify revokeToken was revoked in vault @@ -901,13 +893,13 @@ func TestTokenRevocationJob_Run(t *testing.T) { // Verify revokeToken was set to revoked in repo repoToken = allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{revokeToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{revokeToken.TokenHmac})) assert.Equal(string(RevokedToken), repoToken.Status) // Verify revokeCred attached to revokeToken were marked as revoked lookupCred := allocCredential() lookupCred.PublicId = revokeCred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(string(RevokedCredential), lookupCred.Status) // Verify credsToken was not revoked in vault @@ -916,12 +908,12 @@ func TestTokenRevocationJob_Run(t *testing.T) { // Revoke credential in repo query, queryValues := cred.updateStatusQuery(RevokedCredential) - rows, err := rw.Exec(ctx, query, queryValues) + rows, err := rw.Exec(context.Background(), query, queryValues) assert.Equal(1, rows) assert.NoError(err) // Running again should now revoke the credsToken - err = r.Run(ctx) + err = r.Run(context.Background()) 
require.NoError(err) assert.Equal(1, r.numProcessed) @@ -930,10 +922,10 @@ func TestTokenRevocationJob_Run(t *testing.T) { // Verify credsToken was set to revoked in repo repoToken = allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{credsToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{credsToken.TokenHmac})) assert.Equal(string(RevokedToken), repoToken.Status) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) // With only the current token remaining no tokens should be revoked assert.Equal(0, r.numProcessed) @@ -1006,7 +998,7 @@ func TestNewCredentialRenewalJob(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := newCredentialRenewalJob(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.options...) + got, err := newCredentialRenewalJob(tt.args.r, tt.args.w, tt.args.kms, tt.options...) if tt.wantErr { require.Error(err) assert.Nil(got) @@ -1025,7 +1017,6 @@ func TestNewCredentialRenewalJob(t *testing.T) { func TestCredentialRenewalJob_RunLimits(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1039,23 +1030,23 @@ func TestCredentialRenewalJob_RunLimits(t *testing.T) { in, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(ct)) require.NoError(t, err) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(t, err) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(t, err) - err = sche.RegisterJob(ctx, j) + err = sche.RegisterJob(context.Background(), j) require.NoError(t, err) - cs, err := repo.CreateCredentialStore(ctx, in) + cs, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(t, err) libPath := path.Join("database", "creds", "opened") cl, err := NewCredentialLibrary(cs.PublicId, libPath, WithMethod(MethodGet)) require.NoError(t, err) - cl.PublicId, err = newCredentialLibraryId(ctx) + cl.PublicId, err = newCredentialLibraryId() require.NoError(t, err) - err = rw.Create(ctx, cl) + err = rw.Create(context.Background(), cl) require.NoError(t, err) at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId()) @@ -1064,7 +1055,7 @@ func TestCredentialRenewalJob_RunLimits(t *testing.T) { hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] static.TestSetMembers(t, conn, hs.GetPublicId(), []*static.Host{h}) - tar := tcp.TestTarget(ctx, t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) + tar := tcp.TestTarget(context.Background(), t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) target.TestCredentialLibrary(t, conn, tar.GetPublicId(), cl.GetPublicId()) sess := session.TestSession(t, conn, wrapper, session.ComposedOf{ UserId: uId, @@ -1125,15 +1116,15 @@ func TestCredentialRenewalJob_RunLimits(t *testing.T) { testVaultCred(t, conn, v, cl, sess, credsToken, status, 5*time.Minute) } - r, err := newCredentialRenewalJob(ctx, rw, rw, kmsCache, tt.opts...) + r, err := newCredentialRenewalJob(rw, rw, kmsCache, tt.opts...) 
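Throughout these tests the shared ctx := context.Background() variable is dropped in favor of calling context.Background() inline at each call site. That is a cosmetic change: Background() hands back an identical empty root context every time, so there is nothing worth caching in a local variable; a tiny illustration:

package main

import (
	"context"
	"fmt"
)

func main() {
	// Two inline calls yield the same empty root context, so replacing a
	// stored ctx variable with inline calls does not change behavior.
	fmt.Println(context.Background() == context.Background()) // true
}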
require.NoError(err) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(tt.wantLen, r.numCreds) // Set all credentials to revoked for next test - _, err = rw.Exec(ctx, "update credential_vault_credential set status = 'revoked'", nil) + _, err = rw.Exec(context.Background(), "update credential_vault_credential set status = 'revoked'", nil) assert.NoError(err) }) } @@ -1141,7 +1132,6 @@ func TestCredentialRenewalJob_RunLimits(t *testing.T) { func TestCredentialRenewalJob_Run(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) v := NewTestVaultServer(t, WithDockerNetwork(true)) @@ -1153,23 +1143,23 @@ func TestCredentialRenewalJob_Run(t *testing.T) { org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) kmsCache := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) _, token := v.CreateToken(t, WithPolicies([]string{"default", "boundary-controller", "database"})) credStoreIn, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(token)) require.NoError(err) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, j) + err = sche.RegisterJob(context.Background(), j) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, credStoreIn) + cs, err := repo.CreateCredentialStore(context.Background(), credStoreIn) require.NoError(err) libPath := path.Join("database", "creds", "opened") libIn, err := NewCredentialLibrary(cs.GetPublicId(), libPath) require.NoError(err) - cl, err := repo.CreateCredentialLibrary(ctx, prj.GetPublicId(), libIn) + cl, err := repo.CreateCredentialLibrary(context.Background(), prj.GetPublicId(), libIn) require.NoError(err) at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId()) @@ -1190,12 +1180,12 @@ func TestCredentialRenewalJob_Run(t *testing.T) { }) csToken := allocToken() - require.NoError(rw.LookupWhere(ctx, &csToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &csToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) - credRenewal, err := newCredentialRenewalJob(ctx, rw, rw, kmsCache) + credRenewal, err := newCredentialRenewalJob(rw, rw, kmsCache) require.NoError(err) - err = credRenewal.Run(ctx) + err = credRenewal.Run(context.Background()) require.NoError(err) // No credentials should have been renewed assert.Equal(0, credRenewal.numCreds) @@ -1212,7 +1202,7 @@ func TestCredentialRenewalJob_Run(t *testing.T) { // Sleep to move clock time.Sleep(2 * time.Second) - err = credRenewal.Run(ctx) + err = credRenewal.Run(context.Background()) require.NoError(err) // The active credential should have been renewed assert.Equal(1, credRenewal.numCreds) @@ -1220,21 +1210,21 @@ func TestCredentialRenewalJob_Run(t *testing.T) { // Active credential expiration time should have been updated lookupCred := allocCredential() lookupCred.PublicId = activeCred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Truef(lookupCred.ExpirationTime.AsTime().After(activeCred.ExpirationTime.AsTime()), "expected expiration time to be updated") // Revoke, Revoked and Expired credentials expiration times should not have changed lookupCred = allocCredential() 
lookupCred.PublicId = revokeCred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(lookupCred.ExpirationTime.AsTime(), revokeCred.ExpirationTime.AsTime()) lookupCred = allocCredential() lookupCred.PublicId = revokedCred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(lookupCred.ExpirationTime.AsTime(), revokedCred.ExpirationTime.AsTime()) lookupCred = allocCredential() lookupCred.PublicId = expiredCred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(lookupCred.ExpirationTime.AsTime(), expiredCred.ExpirationTime.AsTime()) // Active credential should have a last renewal time in Vault @@ -1252,7 +1242,6 @@ func TestCredentialRenewalJob_Run(t *testing.T) { func TestCredentialRenewalJob_RunExpired(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) v := NewTestVaultServer(t, WithDockerNetwork(true)) @@ -1264,23 +1253,23 @@ func TestCredentialRenewalJob_RunExpired(t *testing.T) { org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) kmsCache := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) _, token := v.CreateToken(t, WithPolicies([]string{"default", "boundary-controller", "database"})) credStoreIn, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(token)) require.NoError(err) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, j) + err = sche.RegisterJob(context.Background(), j) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, credStoreIn) + cs, err := repo.CreateCredentialStore(context.Background(), credStoreIn) require.NoError(err) libPath := path.Join("database", "creds", "opened") libIn, err := NewCredentialLibrary(cs.GetPublicId(), libPath) require.NoError(err) - cl, err := repo.CreateCredentialLibrary(ctx, prj.GetPublicId(), libIn) + cl, err := repo.CreateCredentialLibrary(context.Background(), prj.GetPublicId(), libIn) require.NoError(err) at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId()) @@ -1289,7 +1278,7 @@ func TestCredentialRenewalJob_RunExpired(t *testing.T) { hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] static.TestSetMembers(t, conn, hs.GetPublicId(), []*static.Host{h}) - tar := tcp.TestTarget(ctx, t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) + tar := tcp.TestTarget(context.Background(), t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) sess := session.TestSession(t, conn, wrapper, session.ComposedOf{ UserId: uId, HostId: h.GetPublicId(), @@ -1301,9 +1290,9 @@ func TestCredentialRenewalJob_RunExpired(t *testing.T) { }) repoToken := allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) - credRenewal, err := newCredentialRenewalJob(ctx, rw, rw, kmsCache) + credRenewal, err := newCredentialRenewalJob(rw, rw, kmsCache) 
require.NoError(err) _, cred := testVaultCred(t, conn, v, cl, sess, repoToken, ActiveCredential, time.Minute) @@ -1315,10 +1304,10 @@ func TestCredentialRenewalJob_RunExpired(t *testing.T) { // Credential status should still be active lookupCred := allocCredential() lookupCred.PublicId = cred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(string(ActiveCredential), lookupCred.Status) - err = credRenewal.Run(ctx) + err = credRenewal.Run(context.Background()) require.NoError(err) // The active credential should have been processed assert.Equal(1, credRenewal.numCreds) @@ -1326,13 +1315,12 @@ func TestCredentialRenewalJob_RunExpired(t *testing.T) { // Credential status should have been updated to expired lookupCred = allocCredential() lookupCred.PublicId = cred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(string(ExpiredCredential), lookupCred.Status) } func TestCredentialRenewalJob_NextRunIn(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1346,19 +1334,19 @@ func TestCredentialRenewalJob_NextRunIn(t *testing.T) { in, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(ct)) require.NoError(t, err) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(t, err) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(t, err) - err = sche.RegisterJob(ctx, j) + err = sche.RegisterJob(context.Background(), j) require.NoError(t, err) - cs, err := repo.CreateCredentialStore(ctx, in) + cs, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(t, err) libPath := path.Join("database", "creds", "opened") libIn, err := NewCredentialLibrary(cs.GetPublicId(), libPath) require.NoError(t, err) - cl, err := repo.CreateCredentialLibrary(ctx, prj.GetPublicId(), libIn) + cl, err := repo.CreateCredentialLibrary(context.Background(), prj.GetPublicId(), libIn) require.NoError(t, err) at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId()) @@ -1367,7 +1355,7 @@ func TestCredentialRenewalJob_NextRunIn(t *testing.T) { hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] static.TestSetMembers(t, conn, hs.GetPublicId(), []*static.Host{h}) - tar := tcp.TestTarget(ctx, t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) + tar := tcp.TestTarget(context.Background(), t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) target.TestCredentialLibrary(t, conn, tar.GetPublicId(), cl.GetPublicId()) sess := session.TestSession(t, conn, wrapper, session.ComposedOf{ UserId: uId, @@ -1467,7 +1455,7 @@ func TestCredentialRenewalJob_NextRunIn(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - r, err := newCredentialRenewalJob(ctx, rw, rw, kmsCache) + r, err := newCredentialRenewalJob(rw, rw, kmsCache) assert.NoError(err) require.NotNil(r) @@ -1476,13 +1464,13 @@ func TestCredentialRenewalJob_NextRunIn(t *testing.T) { testVaultCred(t, conn, v, cl, sess, token, cred.s, cred.e) } - got, err := r.NextRunIn(ctx) + got, err := 
r.NextRunIn(context.Background()) require.NoError(err) // Round to time.Minute to account for lost time between creating credentials and determining next run assert.Equal(tt.want.Round(time.Minute), got.Round(time.Minute)) // Set all credentials to revoked for next test - _, err = rw.Exec(ctx, "update credential_vault_credential set status = 'revoked'", nil) + _, err = rw.Exec(context.Background(), "update credential_vault_credential set status = 'revoked'", nil) assert.NoError(err) }) } @@ -1555,7 +1543,7 @@ func TestNewCredentialRevocationJob(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := newCredentialRevocationJob(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.options...) + got, err := newCredentialRevocationJob(tt.args.r, tt.args.w, tt.args.kms, tt.options...) if tt.wantErr { require.Error(err) assert.Nil(got) @@ -1574,7 +1562,6 @@ func TestNewCredentialRevocationJob(t *testing.T) { func TestCredentialRevocationJob_RunLimits(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1588,21 +1575,21 @@ func TestCredentialRevocationJob_RunLimits(t *testing.T) { in, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(ct)) require.NoError(t, err) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(t, err) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(t, err) - err = sche.RegisterJob(ctx, j) + err = sche.RegisterJob(context.Background(), j) require.NoError(t, err) - cs, err := repo.CreateCredentialStore(ctx, in) + cs, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(t, err) libPath := path.Join("database", "creds", "opened") cl, err := NewCredentialLibrary(cs.PublicId, libPath, WithMethod(MethodGet)) require.NoError(t, err) - cl.PublicId, err = newCredentialLibraryId(ctx) + cl.PublicId, err = newCredentialLibraryId() require.NoError(t, err) - err = rw.Create(ctx, cl) + err = rw.Create(context.Background(), cl) require.NoError(t, err) at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId()) @@ -1611,7 +1598,7 @@ func TestCredentialRevocationJob_RunLimits(t *testing.T) { hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] static.TestSetMembers(t, conn, hs.GetPublicId(), []*static.Host{h}) - tar := tcp.TestTarget(ctx, t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) + tar := tcp.TestTarget(context.Background(), t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) target.TestCredentialLibrary(t, conn, tar.GetPublicId(), cl.GetPublicId()) sess := session.TestSession(t, conn, wrapper, session.ComposedOf{ UserId: uId, @@ -1624,7 +1611,7 @@ func TestCredentialRevocationJob_RunLimits(t *testing.T) { }) repoToken := allocToken() - require.NoError(t, rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) + require.NoError(t, rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) count := 10 tests := []struct { @@ -1674,15 +1661,15 @@ func TestCredentialRevocationJob_RunLimits(t *testing.T) { testVaultCred(t, conn, v, cl, sess, repoToken, status, 5*time.Minute) } - r, err := 
newCredentialRevocationJob(ctx, rw, rw, kmsCache, tt.opts...) + r, err := newCredentialRevocationJob(rw, rw, kmsCache, tt.opts...) require.NoError(err) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(tt.wantLen, r.numCreds) // Set all credentials to revoked for next test - _, err = rw.Exec(ctx, "update credential_vault_credential set status = 'revoked'", nil) + _, err = rw.Exec(context.Background(), "update credential_vault_credential set status = 'revoked'", nil) assert.NoError(err) }) } @@ -1690,7 +1677,6 @@ func TestCredentialRevocationJob_RunLimits(t *testing.T) { func TestCredentialRevocationJob_Run(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) v := NewTestVaultServer(t, WithDockerNetwork(true)) @@ -1702,23 +1688,23 @@ func TestCredentialRevocationJob_Run(t *testing.T) { org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) kmsCache := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) _, token := v.CreateToken(t, WithPolicies([]string{"default", "boundary-controller", "database"})) credStoreIn, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(token)) require.NoError(err) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, j) + err = sche.RegisterJob(context.Background(), j) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, credStoreIn) + cs, err := repo.CreateCredentialStore(context.Background(), credStoreIn) require.NoError(err) libPath := path.Join("database", "creds", "opened") libIn, err := NewCredentialLibrary(cs.GetPublicId(), libPath) require.NoError(err) - cl, err := repo.CreateCredentialLibrary(ctx, prj.GetPublicId(), libIn) + cl, err := repo.CreateCredentialLibrary(context.Background(), prj.GetPublicId(), libIn) require.NoError(err) at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId()) @@ -1727,7 +1713,7 @@ func TestCredentialRevocationJob_Run(t *testing.T) { hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] static.TestSetMembers(t, conn, hs.GetPublicId(), []*static.Host{h}) - tar := tcp.TestTarget(ctx, t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) + tar := tcp.TestTarget(context.Background(), t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) sess := session.TestSession(t, conn, wrapper, session.ComposedOf{ UserId: uId, HostId: h.GetPublicId(), @@ -1739,12 +1725,12 @@ func TestCredentialRevocationJob_Run(t *testing.T) { }) repoToken := allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) - r, err := newCredentialRevocationJob(ctx, rw, rw, kmsCache) + r, err := newCredentialRevocationJob(rw, rw, kmsCache) require.NoError(err) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) // No credentials should have been revoked assert.Equal(0, r.numCreds) @@ -1757,13 +1743,13 @@ func TestCredentialRevocationJob_Run(t *testing.T) { // Verify the revokeCred has a status of revoke lookupCred := allocCredential() lookupCred.PublicId = 
revokeCred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(string(RevokeCredential), lookupCred.Status) // Verify revokeCred is valid in testDb assert.NoError(testDb.ValidateCredential(t, revokeSecret)) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) // The revoke credential should have been revoked assert.Equal(1, r.numCreds) @@ -1771,7 +1757,7 @@ func TestCredentialRevocationJob_Run(t *testing.T) { // revokeCred should now have a status of revoked lookupCred = allocCredential() lookupCred.PublicId = revokeCred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(string(RevokedCredential), lookupCred.Status) // revokeCred should no longer be valid in test database @@ -1785,7 +1771,6 @@ func TestCredentialRevocationJob_Run(t *testing.T) { func TestCredentialRevocationJob_RunDeleted(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) v := NewTestVaultServer(t, WithDockerNetwork(true)) @@ -1797,23 +1782,23 @@ func TestCredentialRevocationJob_RunDeleted(t *testing.T) { org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) kmsCache := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) _, token := v.CreateToken(t, WithPolicies([]string{"default", "boundary-controller", "database"})) credStoreIn, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(token)) require.NoError(err) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, j) + err = sche.RegisterJob(context.Background(), j) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, credStoreIn) + cs, err := repo.CreateCredentialStore(context.Background(), credStoreIn) require.NoError(err) libPath := path.Join("database", "creds", "opened") libIn, err := NewCredentialLibrary(cs.GetPublicId(), libPath) require.NoError(err) - cl, err := repo.CreateCredentialLibrary(ctx, prj.GetPublicId(), libIn) + cl, err := repo.CreateCredentialLibrary(context.Background(), prj.GetPublicId(), libIn) require.NoError(err) at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId()) @@ -1822,7 +1807,7 @@ func TestCredentialRevocationJob_RunDeleted(t *testing.T) { hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] static.TestSetMembers(t, conn, hs.GetPublicId(), []*static.Host{h}) - tar := tcp.TestTarget(ctx, t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) + tar := tcp.TestTarget(context.Background(), t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) sess := session.TestSession(t, conn, wrapper, session.ComposedOf{ UserId: uId, HostId: h.GetPublicId(), @@ -1834,24 +1819,24 @@ func TestCredentialRevocationJob_RunDeleted(t *testing.T) { }) repoToken := allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) - r, err := newCredentialRevocationJob(ctx, rw, rw, kmsCache) + r, err := newCredentialRevocationJob(rw, 
rw, kmsCache) require.NoError(err) secret, cred := testVaultCred(t, conn, v, cl, sess, repoToken, ActiveCredential, 5*time.Hour) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) // No credentials should have been revoked as expiration is 5 hours from now assert.Equal(0, r.numCreds) // Deleting the library should set the cred library_id to null, but not revoke the cred - count, err := rw.Delete(ctx, cl) + count, err := rw.Delete(context.Background(), cl) require.NoError(err) assert.Equal(1, count) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) // No credentials should have been revoked assert.Equal(0, r.numCreds) @@ -1859,7 +1844,7 @@ func TestCredentialRevocationJob_RunDeleted(t *testing.T) { // Verify the cred has a status of active with an empty libraryId lookupCred := allocCredential() lookupCred.PublicId = cred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(string(ActiveCredential), lookupCred.Status) assert.Empty(lookupCred.LibraryId) @@ -1867,18 +1852,18 @@ func TestCredentialRevocationJob_RunDeleted(t *testing.T) { assert.NoError(testDb.ValidateCredential(t, secret)) // Deleting the session should set the cred session_id to null and schedule cred for revocation - count, err = rw.Delete(ctx, sess) + count, err = rw.Delete(context.Background(), sess) require.NoError(err) assert.Equal(1, count) // cred should now have a status of revoke and empty sessionId lookupCred = allocCredential() lookupCred.PublicId = cred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Empty(lookupCred.SessionId) assert.Equal(string(RevokeCredential), lookupCred.Status) - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) // The revoke credential should have been revoked assert.Equal(1, r.numCreds) @@ -1886,7 +1871,7 @@ func TestCredentialRevocationJob_RunDeleted(t *testing.T) { // cred should now have a status of revoked lookupCred = allocCredential() lookupCred.PublicId = cred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(string(RevokedCredential), lookupCred.Status) // secret should no longer be valid in test database @@ -1960,7 +1945,7 @@ func TestNewCredentialStoreCleanupJob(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := newCredentialStoreCleanupJob(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.options...) + got, err := newCredentialStoreCleanupJob(tt.args.r, tt.args.w, tt.args.kms, tt.options...) 
if tt.wantErr { require.Error(err) assert.Nil(got) @@ -1979,7 +1964,6 @@ func TestNewCredentialStoreCleanupJob(t *testing.T) { func TestCredentialStoreCleanupJob_Run(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") @@ -1993,139 +1977,139 @@ func TestCredentialStoreCleanupJob_Run(t *testing.T) { in, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(ct)) require.NoError(err) sche := scheduler.TestScheduler(t, conn, wrapper) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(err) err = sche.RegisterJob(context.Background(), j) require.NoError(err) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) - cs1, err := repo.CreateCredentialStore(ctx, in) + cs1, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(err) _, ct = v.CreateToken(t) in, err = NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(ct)) require.NoError(err) - cs2, err := repo.CreateCredentialStore(ctx, in) + cs2, err := repo.CreateCredentialStore(context.Background(), in) require.NoError(err) // Get token hmac for verifications below repoToken := allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "store_id = ?", []any{cs1.PublicId})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "store_id = ?", []any{cs1.PublicId})) cs1TokenHmac := repoToken.TokenHmac repoToken = allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "store_id = ?", []any{cs2.PublicId})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "store_id = ?", []any{cs2.PublicId})) cs2TokenHmac := repoToken.TokenHmac // create second token on cs2 secondToken := testVaultToken(t, conn, wrapper, v, cs2, MaintainingToken, time.Hour) - r, err := newCredentialStoreCleanupJob(ctx, rw, rw, kmsCache) + r, err := newCredentialStoreCleanupJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, r) + err = sche.RegisterJob(context.Background(), r) require.NoError(err) // No credential stores should have been cleaned up - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(0, r.numStores) // Register token revocation job needed for delete - j1, err := newTokenRevocationJob(ctx, rw, rw, kmsCache) + j1, err := newTokenRevocationJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, j1) + err = sche.RegisterJob(context.Background(), j1) require.NoError(err) // Soft delete both credential stores - count, err := repo.DeleteCredentialStore(ctx, cs1.PublicId) + count, err := repo.DeleteCredentialStore(context.Background(), cs1.PublicId) require.NoError(err) assert.Equal(1, count) - count, err = repo.DeleteCredentialStore(ctx, cs2.PublicId) + count, err = repo.DeleteCredentialStore(context.Background(), cs2.PublicId) require.NoError(err) assert.Equal(1, count) // Verify tokens have been set to revoke repoToken = allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "store_id = ?", []any{cs1.PublicId})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "store_id = ?", []any{cs1.PublicId})) assert.Equal(string(RevokeToken), repoToken.Status) repoToken = allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "store_id = ?", []any{cs2.PublicId})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "store_id = ?", []any{cs2.PublicId})) 
assert.Equal(string(RevokeToken), repoToken.Status) // Both soft deleted credential stores should not be cleaned up yet - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(0, r.numStores) // Update cs1 token to be marked as revoked - count, err = rw.Exec(ctx, updateTokenStatusQuery, []any{RevokedToken, cs1TokenHmac}) + count, err = rw.Exec(context.Background(), updateTokenStatusQuery, []any{RevokedToken, cs1TokenHmac}) require.NoError(err) assert.Equal(1, count) // cs1 should be deleted - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(1, r.numStores) // Lookup of cs1 and its token should fail agg := allocListLookupStore() agg.PublicId = cs1.PublicId - err = rw.LookupByPublicId(ctx, agg) + err = rw.LookupByPublicId(context.Background(), agg) require.Error(err) assert.True(errors.IsNotFoundError(err)) repoToken = allocToken() - err = rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{cs1TokenHmac}) + err = rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{cs1TokenHmac}) require.Error(err) assert.True(errors.IsNotFoundError(err)) // Lookup of cs2 and its token should not error - _, err = repo.LookupCredentialStore(ctx, cs2.PublicId) + _, err = repo.LookupCredentialStore(context.Background(), cs2.PublicId) require.NoError(err) repoToken = allocToken() - err = rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{cs2TokenHmac}) + err = rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{cs2TokenHmac}) require.NoError(err) // Update cs2 token expiration time - count, err = rw.Exec(ctx, "update credential_vault_token set expiration_time = now() where token_hmac = ?;", []any{cs2TokenHmac}) + count, err = rw.Exec(context.Background(), "update credential_vault_token set expiration_time = now() where token_hmac = ?;", []any{cs2TokenHmac}) require.NoError(err) assert.Equal(1, count) // cs2 still has a second token not yet revoked/expired - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(0, r.numStores) // Lookup of cs2 and its token should not error - _, err = repo.LookupCredentialStore(ctx, cs2.PublicId) + _, err = repo.LookupCredentialStore(context.Background(), cs2.PublicId) require.NoError(err) repoToken = allocToken() - err = rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{cs2TokenHmac}) + err = rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{cs2TokenHmac}) require.NoError(err) // set secondToken with an expired status - count, err = rw.Exec(ctx, updateTokenStatusQuery, []any{ExpiredToken, secondToken.TokenHmac}) + count, err = rw.Exec(context.Background(), updateTokenStatusQuery, []any{ExpiredToken, secondToken.TokenHmac}) require.NoError(err) assert.Equal(1, count) // With no un-expired or un-revoked tokens cs2 should now be deleted - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(1, r.numStores) // Lookup of cs2 and its token should fail agg = allocListLookupStore() agg.PublicId = cs2.PublicId - err = rw.LookupByPublicId(ctx, agg) + err = rw.LookupByPublicId(context.Background(), agg) require.Error(err) assert.True(errors.IsNotFoundError(err)) repoToken = allocToken() - err = rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{cs2TokenHmac}) + err = rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{cs2TokenHmac}) require.Error(err) assert.True(errors.IsNotFoundError(err)) - err = rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", 
[]any{secondToken.TokenHmac}) + err = rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{secondToken.TokenHmac}) require.Error(err) assert.True(errors.IsNotFoundError(err)) } @@ -2163,7 +2147,7 @@ func TestNewCredentialCleanupJob(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := newCredentialCleanupJob(context.Background(), tt.args.w) + got, err := newCredentialCleanupJob(tt.args.w) if tt.wantErr { require.Error(err) assert.Nil(got) @@ -2179,7 +2163,6 @@ func TestNewCredentialCleanupJob(t *testing.T) { func TestCredentialCleanupJob_Run(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) v := NewTestVaultServer(t, WithDockerNetwork(true)) @@ -2191,23 +2174,23 @@ func TestCredentialCleanupJob_Run(t *testing.T) { org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) kmsCache := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sche) + repo, err := NewRepository(rw, rw, kmsCache, sche) require.NoError(err) _, token := v.CreateToken(t, WithPolicies([]string{"default", "boundary-controller", "database"})) credStoreIn, err := NewCredentialStore(prj.GetPublicId(), v.Addr, []byte(token)) require.NoError(err) - j, err := newTokenRenewalJob(ctx, rw, rw, kmsCache) + j, err := newTokenRenewalJob(rw, rw, kmsCache) require.NoError(err) - err = sche.RegisterJob(ctx, j) + err = sche.RegisterJob(context.Background(), j) require.NoError(err) - cs, err := repo.CreateCredentialStore(ctx, credStoreIn) + cs, err := repo.CreateCredentialStore(context.Background(), credStoreIn) require.NoError(err) libPath := path.Join("database", "creds", "opened") libIn, err := NewCredentialLibrary(cs.GetPublicId(), libPath) require.NoError(err) - cl, err := repo.CreateCredentialLibrary(ctx, prj.GetPublicId(), libIn) + cl, err := repo.CreateCredentialLibrary(context.Background(), prj.GetPublicId(), libIn) require.NoError(err) at := authtoken.TestAuthToken(t, conn, kmsCache, org.GetPublicId()) @@ -2216,7 +2199,7 @@ func TestCredentialCleanupJob_Run(t *testing.T) { hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] static.TestSetMembers(t, conn, hs.GetPublicId(), []*static.Host{h}) - tar := tcp.TestTarget(ctx, t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) + tar := tcp.TestTarget(context.Background(), t, conn, prj.GetPublicId(), "test", target.WithHostSources([]string{hs.GetPublicId()})) sess1 := session.TestSession(t, conn, wrapper, session.ComposedOf{ UserId: uId, HostId: h.GetPublicId(), @@ -2237,9 +2220,9 @@ func TestCredentialCleanupJob_Run(t *testing.T) { }) repoToken := allocToken() - require.NoError(rw.LookupWhere(ctx, &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) + require.NoError(rw.LookupWhere(context.Background(), &repoToken, "token_hmac = ?", []any{cs.outputToken.TokenHmac})) - r, err := newCredentialCleanupJob(ctx, rw) + r, err := newCredentialCleanupJob(rw) require.NoError(err) _, sess1Cred1 := testVaultCred(t, conn, v, cl, sess1, repoToken, ActiveCredential, 5*time.Hour) @@ -2248,56 +2231,56 @@ func TestCredentialCleanupJob_Run(t *testing.T) { _, sess2Cred := testVaultCred(t, conn, v, cl, sess2, repoToken, ActiveCredential, 5*time.Hour) // No credentials should be cleaned up - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(0, 
r.numCreds) // Delete sess1 - count, err := rw.Delete(ctx, sess1) + count, err := rw.Delete(context.Background(), sess1) require.NoError(err) assert.Equal(1, count) // Credentials are still in the revoke state so none should be deleted yet - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(0, r.numCreds) query, queryArgs := sess1Cred1.updateStatusQuery(RevokedCredential) - count, err = rw.Exec(ctx, query, queryArgs) + count, err = rw.Exec(context.Background(), query, queryArgs) require.NoError(err) assert.Equal(1, count) query, queryArgs = sess1Cred2.updateStatusQuery(ExpiredCredential) - count, err = rw.Exec(ctx, query, queryArgs) + count, err = rw.Exec(context.Background(), query, queryArgs) require.NoError(err) assert.Equal(1, count) query, queryArgs = sess1Cred3.updateStatusQuery(UnknownCredentialStatus) - count, err = rw.Exec(ctx, query, queryArgs) + count, err = rw.Exec(context.Background(), query, queryArgs) require.NoError(err) assert.Equal(1, count) query, queryArgs = sess2Cred.updateStatusQuery(RevokedCredential) - count, err = rw.Exec(ctx, query, queryArgs) + count, err = rw.Exec(context.Background(), query, queryArgs) require.NoError(err) assert.Equal(1, count) // Only the three credentials associated with the deleted session should be deleted - err = r.Run(ctx) + err = r.Run(context.Background()) require.NoError(err) assert.Equal(3, r.numCreds) // Session 1 creds should no longer exist lookupCred := allocCredential() lookupCred.PublicId = sess1Cred1.PublicId - require.Error(rw.LookupById(ctx, lookupCred)) + require.Error(rw.LookupById(context.Background(), lookupCred)) lookupCred.PublicId = sess1Cred2.PublicId - require.Error(rw.LookupById(ctx, lookupCred)) + require.Error(rw.LookupById(context.Background(), lookupCred)) lookupCred.PublicId = sess1Cred3.PublicId - require.Error(rw.LookupById(ctx, lookupCred)) + require.Error(rw.LookupById(context.Background(), lookupCred)) // Session 2 creds should still exist but be revoked lookupCred.PublicId = sess2Cred.PublicId - require.NoError(rw.LookupById(ctx, lookupCred)) + require.NoError(rw.LookupById(context.Background(), lookupCred)) assert.Equal(string(RevokedCredential), lookupCred.Status) } diff --git a/internal/credential/vault/options_test.go b/internal/credential/vault/options_test.go index 928eeecd554..680d2364b5b 100644 --- a/internal/credential/vault/options_test.go +++ b/internal/credential/vault/options_test.go @@ -4,7 +4,6 @@ package vault import ( - "context" "testing" "github.com/hashicorp/boundary/internal/credential" @@ -68,7 +67,7 @@ func Test_GetOpts(t *testing.T) { inCert := testClientCert(t, testCaCert(t)) cert := inCert.Cert.Cert key := inCert.Cert.Key - clientCert, err := NewClientCertificate(context.Background(), cert, key) + clientCert, err := NewClientCertificate(cert, key) assert.NoError(t, err) assert.NotNil(t, clientCert) opts := getOpts(WithClientCert(clientCert)) diff --git a/internal/credential/vault/private_credential.go b/internal/credential/vault/private_credential.go index 98d8022e8e2..c71603d7075 100644 --- a/internal/credential/vault/private_credential.go +++ b/internal/credential/vault/private_credential.go @@ -100,7 +100,7 @@ func (pc *privateCredential) client(ctx context.Context) (vaultClient, error) { client, err := vaultClientFactoryFn(ctx, clientConfig, WithWorkerFilter(pc.WorkerFilter)) if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create vault client")) + return nil, errors.WrapDeprecated(err, op, 
errors.WithMsg("unable to create vault client")) } return client, nil } diff --git a/internal/credential/vault/private_library.go b/internal/credential/vault/private_library.go index de46fc43d5c..b52af5a064a 100644 --- a/internal/credential/vault/private_library.go +++ b/internal/credential/vault/private_library.go @@ -285,7 +285,7 @@ func (pl *genericIssuingCredentialLibrary) client(ctx context.Context) (vaultCli client, err := vaultClientFactoryFn(ctx, clientConfig, WithWorkerFilter(pl.WorkerFilter)) if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create vault client")) + return nil, errors.WrapDeprecated(err, op, errors.WithMsg("unable to create vault client")) } return client, nil } @@ -309,7 +309,7 @@ func (pl *genericIssuingCredentialLibrary) retrieveCredential(ctx context.Contex // Get the credential ID early. No need to get a secret from Vault // if there is no way to save it in the database. - credId, err := newCredentialId(ctx) + credId, err := newCredentialId() if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -367,7 +367,7 @@ func (pl *genericIssuingCredentialLibrary) retrieveCredential(ctx context.Contex } leaseDuration := time.Duration(secret.LeaseDuration) * time.Second - cred, err := newCredential(ctx, pl.GetPublicId(), secret.LeaseID, pl.TokenHmac, leaseDuration) + cred, err := newCredential(pl.GetPublicId(), secret.LeaseID, pl.TokenHmac, leaseDuration) if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -390,7 +390,7 @@ func (pl *genericIssuingCredentialLibrary) TableName() string { func (r *Repository) getIssueCredLibraries(ctx context.Context, requests []credential.Request) ([]issuingCredentialLibrary, error) { const op = "vault.(Repository).getIssueCredLibraries" - mapper, err := newMapper(ctx, requests) + mapper, err := newMapper(requests) if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -660,13 +660,13 @@ type requestMap struct { ids map[string][]credential.Purpose } -func newMapper(ctx context.Context, requests []credential.Request) (*requestMap, error) { +func newMapper(requests []credential.Request) (*requestMap, error) { ids := make(map[string][]credential.Purpose, len(requests)) for _, req := range requests { if purps, ok := ids[req.SourceId]; ok { for _, purp := range purps { if purp == req.Purpose { - return nil, errors.E(ctx, errors.WithCode(errors.InvalidParameter), errors.WithMsg("duplicate library and purpose")) + return nil, errors.EDeprecated(errors.WithCode(errors.InvalidParameter), errors.WithMsg("duplicate library and purpose")) } } } @@ -765,7 +765,7 @@ func (lib *sshCertIssuingCredentialLibrary) client(ctx context.Context) (vaultCl client, err := vaultClientFactoryFn(ctx, clientConfig, WithWorkerFilter(lib.WorkerFilter)) if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create vault client")) + return nil, errors.WrapDeprecated(err, op, errors.WithMsg("unable to create vault client")) } return client, nil } @@ -870,7 +870,7 @@ func (lib *sshCertIssuingCredentialLibrary) retrieveCredential(ctx context.Conte // Get the credential ID early. No need to get a secret from Vault // if there is no way to save it in the database. 
- credId, err := newCredentialId(ctx) + credId, err := newCredentialId() if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -979,7 +979,7 @@ func (lib *sshCertIssuingCredentialLibrary) retrieveCredential(ctx context.Conte } leaseDuration := time.Duration(secret.LeaseDuration) * time.Second - cred, err := newCredential(ctx, lib.GetPublicId(), secret.LeaseID, lib.TokenHmac, leaseDuration) + cred, err := newCredential(lib.GetPublicId(), secret.LeaseID, lib.TokenHmac, leaseDuration) if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/credential/vault/private_library_test.go b/internal/credential/vault/private_library_test.go index ef8ee861262..ef9f5ccccc4 100644 --- a/internal/credential/vault/private_library_test.go +++ b/internal/credential/vault/private_library_test.go @@ -53,7 +53,7 @@ func TestRepository_getPrivateLibraries(t *testing.T) { _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) err = RegisterJobs(ctx, sche, rw, rw, kms) @@ -65,7 +65,7 @@ func TestRepository_getPrivateLibraries(t *testing.T) { } if tt.tls == TestClientTLS { opts = append(opts, WithCACert(v.CaCert)) - clientCert, err := NewClientCertificate(ctx, v.ClientCert, v.ClientKey) + clientCert, err := NewClientCertificate(v.ClientCert, v.ClientKey) require.NoError(err) opts = append(opts, WithClientCert(clientCert)) } @@ -312,8 +312,6 @@ func TestRepository_getPrivateLibraries(t *testing.T) { } func TestRequestMap(t *testing.T) { - ctx := context.Background() - type args struct { requests []credential.Request } @@ -392,7 +390,7 @@ func TestRequestMap(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - mapper, err := newMapper(ctx, tt.args.requests) + mapper, err := newMapper(tt.args.requests) if tt.wantErr { assert.Error(err) assert.Nil(mapper) @@ -1257,7 +1255,7 @@ func TestRepository_sshCertIssuingCredentialLibrary_retrieveCredential(t *testin sche := scheduler.TestScheduler(t, conn, wrapper) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(t, err) require.NotNil(t, repo) @@ -1348,7 +1346,7 @@ func TestRepository_sshCertIssuingCredentialLibrary_retrieveCredential(t *testin lib, err := NewSSHCertificateCredentialLibrary(cs.GetPublicId(), tt.vaulthPath, tt.username, tt.opts...) 
require.NoError(err) require.NotNil(lib) - lib.PublicId, err = newSSHCertificateCredentialLibraryId(ctx) + lib.PublicId, err = newSSHCertificateCredentialLibraryId() require.NoError(err) _, err = rw.DoTx(ctx, db.StdRetryCnt, db.ExpBackoff{}, diff --git a/internal/credential/vault/private_store.go b/internal/credential/vault/private_store.go index e8f97d36273..c2809346570 100644 --- a/internal/credential/vault/private_store.go +++ b/internal/credential/vault/private_store.go @@ -156,7 +156,7 @@ func (ps *clientStore) client(ctx context.Context) (vaultClient, error) { client, err := vaultClientFactoryFn(ctx, clientConfig, WithWorkerFilter(ps.WorkerFilter)) if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create vault client")) + return nil, errors.WrapDeprecated(err, op, errors.WithMsg("unable to create vault client")) } return client, nil } diff --git a/internal/credential/vault/private_store_test.go b/internal/credential/vault/private_store_test.go index 62dce4c477d..d7e249b57f6 100644 --- a/internal/credential/vault/private_store_test.go +++ b/internal/credential/vault/private_store_test.go @@ -44,7 +44,7 @@ func TestRepository_lookupPrivateStore(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) err = RegisterJobs(ctx, sche, rw, rw, kms) @@ -60,7 +60,7 @@ func TestRepository_lookupPrivateStore(t *testing.T) { } if tt.tls == TestClientTLS { opts = append(opts, WithCACert(v.CaCert)) - clientCert, err := NewClientCertificate(ctx, v.ClientCert, v.ClientKey) + clientCert, err := NewClientCertificate(v.ClientCert, v.ClientKey) require.NoError(err) opts = append(opts, WithClientCert(clientCert)) } diff --git a/internal/credential/vault/public_ids.go b/internal/credential/vault/public_ids.go index e63da291ac0..45198a1a42a 100644 --- a/internal/credential/vault/public_ids.go +++ b/internal/credential/vault/public_ids.go @@ -4,8 +4,6 @@ package vault import ( - "context" - "github.com/hashicorp/boundary/globals" "github.com/hashicorp/boundary/internal/credential" "github.com/hashicorp/boundary/internal/db" @@ -35,34 +33,34 @@ const ( SSHCertificateLibrarySubtype = subtypes.Subtype("vault-ssh-certificate") ) -func newCredentialStoreId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.VaultCredentialStorePrefix) +func newCredentialStoreId() (string, error) { + id, err := db.NewPublicId(globals.VaultCredentialStorePrefix) if err != nil { - return "", errors.Wrap(ctx, err, "vault.newCredentialStoreId") + return "", errors.WrapDeprecated(err, "vault.newCredentialStoreId") } return id, nil } -func newCredentialId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, DynamicCredentialPrefix) +func newCredentialId() (string, error) { + id, err := db.NewPublicId(DynamicCredentialPrefix) if err != nil { - return "", errors.Wrap(ctx, err, "vault.newCredentialId") + return "", errors.WrapDeprecated(err, "vault.newCredentialId") } return id, nil } -func newCredentialLibraryId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.VaultCredentialLibraryPrefix) +func newCredentialLibraryId() (string, error) { + id, err := db.NewPublicId(globals.VaultCredentialLibraryPrefix) if err != nil { - return "", errors.Wrap(ctx, err, "vault.newCredentialLibraryId") + return "", 
errors.WrapDeprecated(err, "vault.newCredentialLibraryId") } return id, nil } -func newSSHCertificateCredentialLibraryId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.VaultSshCertificateCredentialLibraryPrefix) +func newSSHCertificateCredentialLibraryId() (string, error) { + id, err := db.NewPublicId(globals.VaultSshCertificateCredentialLibraryPrefix) if err != nil { - return "", errors.Wrap(ctx, err, "vault.newSSHCertificateCredentialLibraryPrefix") + return "", errors.WrapDeprecated(err, "vault.newSSHCertificateCredentialLibraryPrefix") } return id, nil } diff --git a/internal/credential/vault/repository.go b/internal/credential/vault/repository.go index 7fb1f56c026..3d2f369b740 100644 --- a/internal/credential/vault/repository.go +++ b/internal/credential/vault/repository.go @@ -4,8 +4,6 @@ package vault import ( - "context" - "github.com/hashicorp/boundary/internal/db" "github.com/hashicorp/boundary/internal/errors" "github.com/hashicorp/boundary/internal/kms" @@ -28,17 +26,17 @@ type Repository struct { // only be used for one transaction and it is not safe for concurrent go // routines to access it. WithLimit option is used as a repo wide default // limit applied to all ListX methods. -func NewRepository(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, scheduler *scheduler.Scheduler, opt ...Option) (*Repository, error) { +func NewRepository(r db.Reader, w db.Writer, kms *kms.Kms, scheduler *scheduler.Scheduler, opt ...Option) (*Repository, error) { const op = "vault.NewRepository" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "db.Reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "db.Reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "db.Writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "kms") case scheduler == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "scheduler") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "scheduler") } opts := getOpts(opt...) diff --git a/internal/credential/vault/repository_credential_library.go b/internal/credential/vault/repository_credential_library.go index 0cbdde7f7da..1f6dd2799ee 100644 --- a/internal/credential/vault/repository_credential_library.go +++ b/internal/credential/vault/repository_credential_library.go @@ -57,7 +57,7 @@ func (r *Repository) CreateCredentialLibrary(ctx context.Context, projectId stri return nil, err // intentionally not wrapped. 
} - id, err := newCredentialLibraryId(ctx) + id, err := newCredentialLibraryId() if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/credential/vault/repository_credential_library_test.go b/internal/credential/vault/repository_credential_library_test.go index bc80bc1e7f7..92d716e8416 100644 --- a/internal/credential/vault/repository_credential_library_test.go +++ b/internal/credential/vault/repository_credential_library_test.go @@ -447,7 +447,7 @@ func TestRepository_CreateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) got, err := repo.CreateCredentialLibrary(ctx, prj.GetPublicId(), tt.in, tt.opts...) @@ -505,7 +505,7 @@ func TestRepository_CreateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -538,7 +538,7 @@ func TestRepository_CreateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) @@ -1472,7 +1472,7 @@ func TestRepository_UpdateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1548,7 +1548,7 @@ func TestRepository_UpdateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1582,7 +1582,7 @@ func TestRepository_UpdateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1630,7 +1630,7 @@ func TestRepository_UpdateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1671,7 +1671,7 @@ func TestRepository_UpdateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1875,7 +1875,7 @@ func TestRepository_LookupCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, 
sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) orig, err := repo.CreateCredentialLibrary(ctx, prj.GetPublicId(), tt.in) @@ -1917,7 +1917,7 @@ func TestRepository_LookupCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) // test @@ -1933,10 +1933,10 @@ func TestRepository_LookupCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) - badId, err := newCredentialLibraryId(ctx) + badId, err := newCredentialLibraryId() assert.NoError(err) require.NotNil(badId) // test @@ -1948,7 +1948,6 @@ func TestRepository_LookupCredentialLibrary(t *testing.T) { func TestRepository_DeleteCredentialLibrary(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1958,7 +1957,7 @@ func TestRepository_DeleteCredentialLibrary(t *testing.T) { cs := TestCredentialStores(t, conn, wrapper, prj.GetPublicId(), 1)[0] l := TestCredentialLibraries(t, conn, wrapper, cs.GetPublicId(), 1)[0] - badId, err := newCredentialLibraryId(ctx) + badId, err := newCredentialLibraryId() require.NoError(t, err) require.NotNil(t, badId) @@ -1988,9 +1987,10 @@ func TestRepository_DeleteCredentialLibrary(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) + ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -2009,9 +2009,10 @@ func TestRepository_DeleteCredentialLibrary(t *testing.T) { // setup assert, require := assert.New(t), require.New(t) + ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -2055,7 +2056,7 @@ func TestRepository_ListCredentialLibraries(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -2099,7 +2100,7 @@ func TestRepository_ListCredentialLibraries(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) // test @@ -2115,7 +2116,7 @@ func TestRepository_ListCredentialLibraries(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -2189,7 
+2190,7 @@ func TestRepository_ListCredentialLibraries_Limits(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, sche, tt.repoOpts...) assert.NoError(err) require.NotNil(repo) got, err := repo.ListCredentialLibraries(ctx, libs[0].StoreId, tt.listOpts...) diff --git a/internal/credential/vault/repository_credential_store.go b/internal/credential/vault/repository_credential_store.go index a8d5b2d83e5..f4619a17533 100644 --- a/internal/credential/vault/repository_credential_store.go +++ b/internal/credential/vault/repository_credential_store.go @@ -68,7 +68,7 @@ func (r *Repository) CreateCredentialStore(ctx context.Context, cs *CredentialSt cs = cs.clone() - id, err := newCredentialStoreId(ctx) + id, err := newCredentialStoreId() if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -85,7 +85,7 @@ func (r *Repository) CreateCredentialStore(ctx context.Context, cs *CredentialSt if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to lookup vault token")) } - if err := validateTokenLookup(ctx, op, tokenLookup); err != nil { + if err := validateTokenLookup(op, tokenLookup); err != nil { return nil, err } @@ -114,7 +114,7 @@ func (r *Repository) CreateCredentialStore(ctx context.Context, cs *CredentialSt return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to get vault token accessor")) } - token, err := newToken(ctx, id, cs.inputToken, []byte(accessor), tokenExpires) + token, err := newToken(id, cs.inputToken, []byte(accessor), tokenExpires) if err != nil { return nil, err } @@ -216,35 +216,35 @@ func (r *Repository) CreateCredentialStore(ctx context.Context, cs *CredentialSt return newCredentialStore, nil } -func validateTokenLookup(ctx context.Context, op errors.Op, s *vault.Secret) error { +func validateTokenLookup(op errors.Op, s *vault.Secret) error { if s.Data == nil { - return errors.New(ctx, errors.InvalidParameter, op, "vault secret is not a token lookup") + return errors.NewDeprecated(errors.InvalidParameter, op, "vault secret is not a token lookup") } if s.Data["renewable"] == nil { - return errors.E(ctx, errors.WithCode(errors.VaultTokenNotRenewable), errors.WithOp(op)) + return errors.EDeprecated(errors.WithCode(errors.VaultTokenNotRenewable), errors.WithOp(op)) } renewable, err := parseutil.ParseBool(s.Data["renewable"]) if err != nil { - return errors.Wrap(ctx, err, op) + return errors.WrapDeprecated(err, op) } if !renewable { - return errors.E(ctx, errors.WithCode(errors.VaultTokenNotRenewable), errors.WithOp(op)) + return errors.EDeprecated(errors.WithCode(errors.VaultTokenNotRenewable), errors.WithOp(op)) } if s.Data["orphan"] == nil { - return errors.E(ctx, errors.WithCode(errors.VaultTokenNotOrphan), errors.WithOp(op)) + return errors.EDeprecated(errors.WithCode(errors.VaultTokenNotOrphan), errors.WithOp(op)) } orphan, err := parseutil.ParseBool(s.Data["orphan"]) if err != nil { - return errors.Wrap(ctx, err, op) + return errors.WrapDeprecated(err, op) } if !orphan { - return errors.E(ctx, errors.WithCode(errors.VaultTokenNotOrphan), errors.WithOp(op)) + return errors.EDeprecated(errors.WithCode(errors.VaultTokenNotOrphan), errors.WithOp(op)) } if s.Data["period"] == nil { - return errors.E(ctx, errors.WithCode(errors.VaultTokenNotPeriodic), errors.WithOp(op)) + return errors.EDeprecated(errors.WithCode(errors.VaultTokenNotPeriodic), errors.WithOp(op)) 
} return nil @@ -459,7 +459,7 @@ func (r *Repository) UpdateCredentialStore(ctx context.Context, cs *CredentialSt origStore := ps.toCredentialStore() origStore.inputToken = ps.Token if len(ps.ClientCert) > 0 { - origStore.clientCert, err = NewClientCertificate(ctx, ps.ClientCert, ps.ClientKey) + origStore.clientCert, err = NewClientCertificate(ps.ClientCert, ps.ClientKey) } if err != nil { return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("can't recreate client certificate for vault client creation")) @@ -482,7 +482,7 @@ func (r *Repository) UpdateCredentialStore(ctx context.Context, cs *CredentialSt if err != nil { return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("cannot lookup token for updated store")) } - if err := validateTokenLookup(ctx, op, tokenLookup); err != nil { + if err := validateTokenLookup(op, tokenLookup); err != nil { return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op) } @@ -510,7 +510,7 @@ func (r *Repository) UpdateCredentialStore(ctx context.Context, cs *CredentialSt if err != nil { return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to get vault token accessor")) } - if token, err = newToken(ctx, cs.GetPublicId(), cs.inputToken, []byte(accessor), tokenExpires); err != nil { + if token, err = newToken(cs.GetPublicId(), cs.inputToken, []byte(accessor), tokenExpires); err != nil { return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op) } runJobsInterval := r.scheduler.GetRunJobsInterval() diff --git a/internal/credential/vault/repository_credential_store_test.go b/internal/credential/vault/repository_credential_store_test.go index 7ec49908211..14fd63499df 100644 --- a/internal/credential/vault/repository_credential_store_test.go +++ b/internal/credential/vault/repository_credential_store_test.go @@ -40,7 +40,7 @@ func TestRepository_CreateCredentialStoreResource(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -74,7 +74,7 @@ func TestRepository_CreateCredentialStoreResource(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -177,7 +177,7 @@ func TestRepository_CreateCredentialStoreNonResource(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -193,7 +193,7 @@ func TestRepository_CreateCredentialStoreNonResource(t *testing.T) { } if tt.tls == TestClientTLS { opts = append(opts, WithCACert(v.CaCert)) - clientCert, err := NewClientCertificate(ctx, v.ClientCert, v.ClientKey) + clientCert, err := NewClientCertificate(v.ClientCert, v.ClientKey) require.NoError(err) opts = append(opts, WithClientCert(clientCert)) } @@ -233,7 +233,6 @@ func TestRepository_CreateCredentialStoreNonResource(t *testing.T) { func TestRepository_LookupCredentialStore(t *testing.T) 
{ t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -247,17 +246,17 @@ func TestRepository_LookupCredentialStore(t *testing.T) { ccert := allocClientCertificate() ccert.StoreId = csWithoutClientCert.GetPublicId() - rows, err := rw.Delete(ctx, ccert, db.WithWhere("store_id = ?", csWithoutClientCert.GetPublicId())) + rows, err := rw.Delete(context.Background(), ccert, db.WithWhere("store_id = ?", csWithoutClientCert.GetPublicId())) require.NoError(t, err) require.Equal(t, 1, rows) - rows, err = rw.Exec(ctx, + rows, err = rw.Exec(context.Background(), "update credential_vault_token set status = ? where token_hmac = ?", []any{ExpiredToken, csWithExpiredToken.Token().TokenHmac}) require.NoError(t, err) require.Equal(t, 1, rows) - badId, err := newCredentialStoreId(ctx) + badId, err := newCredentialStoreId() assert.NoError(t, err) require.NotNil(t, badId) @@ -301,8 +300,9 @@ func TestRepository_LookupCredentialStore(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) + ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) err = RegisterJobs(ctx, sche, rw, rw, kms) @@ -742,7 +742,7 @@ func TestRepository_UpdateCredentialStore_Attributes(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) err = RegisterJobs(ctx, sche, rw, rw, kms) @@ -823,7 +823,7 @@ func TestRepository_UpdateCredentialStore_Attributes(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) err = RegisterJobs(ctx, sche, rw, rw, kms) @@ -875,7 +875,7 @@ func TestRepository_UpdateCredentialStore_Attributes(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) err = RegisterJobs(ctx, sche, rw, rw, kms) @@ -925,7 +925,7 @@ func TestRepository_UpdateCredentialStore_Attributes(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) err = RegisterJobs(ctx, sche, rw, rw, kms) @@ -956,7 +956,7 @@ func TestRepository_UpdateCredentialStore_Attributes(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) err = RegisterJobs(ctx, sche, rw, rw, kms) @@ -1007,7 +1007,7 @@ func TestRepository_UpdateCredentialStore_Attributes(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, 
rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) err = RegisterJobs(ctx, sche, rw, rw, kms) @@ -1090,7 +1090,7 @@ func TestRepository_UpdateCredentialStore_VaultToken(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -1154,7 +1154,7 @@ func TestRepository_UpdateCredentialStore_ClientCert(t *testing.T) { sche := scheduler.TestScheduler(t, conn, wrapper) existingClientCert := func(t *testing.T, v *TestVaultServer) *ClientCertificate { - clientCert, err := NewClientCertificate(context.Background(), v.ClientCert, v.ClientKey) + clientCert, err := NewClientCertificate(v.ClientCert, v.ClientKey) require.NoError(t, err) return clientCert } @@ -1228,7 +1228,7 @@ func TestRepository_UpdateCredentialStore_ClientCert(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -1243,7 +1243,7 @@ func TestRepository_UpdateCredentialStore_ClientCert(t *testing.T) { } if tt.tls == TestClientTLS { opts = append(opts, WithCACert(v.CaCert)) - clientCert, err := NewClientCertificate(ctx, v.ClientCert, v.ClientKey) + clientCert, err := NewClientCertificate(v.ClientCert, v.ClientKey) require.NoError(err) opts = append(opts, WithClientCert(clientCert)) } @@ -1294,7 +1294,7 @@ func TestRepository_ListCredentialStores_Multiple_Scopes(t *testing.T) { assert, require := assert.New(t), require.New(t) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(context.Background(), rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) err = RegisterJobs(context.Background(), sche, rw, rw, kms) @@ -1345,7 +1345,7 @@ func TestRepository_DeleteCredentialStore(t *testing.T) { kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) rw := db.New(conn) - repo, err := NewRepository(context.Background(), rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(t, err) require.NotNil(t, repo) err = RegisterJobs(context.Background(), sche, rw, rw, kms) diff --git a/internal/credential/vault/repository_credentials_test.go b/internal/credential/vault/repository_credentials_test.go index 05efd50ebaf..3079ba48906 100644 --- a/internal/credential/vault/repository_credentials_test.go +++ b/internal/credential/vault/repository_credentials_test.go @@ -130,7 +130,7 @@ func TestRepository_IssueCredentials(t *testing.T) { kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := vault.NewRepository(ctx, rw, rw, kms, sche) + repo, err := vault.NewRepository(rw, rw, kms, sche) require.NoError(t, err) require.NotNil(t, repo) err = vault.RegisterJobs(ctx, sche, rw, rw, kms) @@ -146,7 +146,7 @@ func TestRepository_IssueCredentials(t *testing.T) { var opts []vault.Option opts = append(opts, vault.WithCACert(v.CaCert)) - clientCert, err := vault.NewClientCertificate(ctx, v.ClientCert, v.ClientKey) + clientCert, err := vault.NewClientCertificate(v.ClientCert, 
v.ClientKey) require.NoError(t, err) opts = append(opts, vault.WithClientCert(clientCert)) @@ -555,12 +555,12 @@ func TestRepository_Revoke(t *testing.T) { sessions[sess.GetPublicId()] = credentials } - ctx := context.Background() sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := vault.NewRepository(ctx, rw, rw, kms, sche) + repo, err := vault.NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) + ctx := context.Background() assert.Error(repo.Revoke(ctx, "")) type credCount struct { @@ -673,12 +673,12 @@ func Test_TerminateSession(t *testing.T) { sessions[sess.GetPublicId()] = credentials } - ctx := context.Background() sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := vault.NewRepository(ctx, rw, rw, kms, sche) + repo, err := vault.NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) + ctx := context.Background() assert.Error(repo.Revoke(ctx, "")) type credCount struct { diff --git a/internal/credential/vault/repository_ssh_certificate_credential_library.go b/internal/credential/vault/repository_ssh_certificate_credential_library.go index 49bcd95d966..1e48fca4fef 100644 --- a/internal/credential/vault/repository_ssh_certificate_credential_library.go +++ b/internal/credential/vault/repository_ssh_certificate_credential_library.go @@ -66,7 +66,7 @@ func (r *Repository) CreateSSHCertificateCredentialLibrary(ctx context.Context, return nil, errors.New(ctx, errors.InvalidParameter, op, "invalid credential type") } - id, err := newSSHCertificateCredentialLibraryId(ctx) + id, err := newSSHCertificateCredentialLibraryId() if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/credential/vault/repository_ssh_certificate_credential_library_test.go b/internal/credential/vault/repository_ssh_certificate_credential_library_test.go index fc28fa20c55..24b45ea65b9 100644 --- a/internal/credential/vault/repository_ssh_certificate_credential_library_test.go +++ b/internal/credential/vault/repository_ssh_certificate_credential_library_test.go @@ -587,7 +587,7 @@ func TestRepository_CreateSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) got, err := repo.CreateSSHCertificateCredentialLibrary(ctx, prj.GetPublicId(), tt.in, tt.opts...) 
@@ -621,7 +621,7 @@ func TestRepository_CreateSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -655,7 +655,7 @@ func TestRepository_CreateSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) require.NoError(err) require.NotNil(repo) @@ -760,7 +760,7 @@ func TestRepository_LookupSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) orig, err := repo.CreateSSHCertificateCredentialLibrary(ctx, prj.GetPublicId(), tt.in) @@ -783,7 +783,7 @@ func TestRepository_LookupSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) // test @@ -799,10 +799,10 @@ func TestRepository_LookupSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) - badId, err := newSSHCertificateCredentialLibraryId(ctx) + badId, err := newSSHCertificateCredentialLibraryId() assert.NoError(err) require.NotNil(badId) // test @@ -874,7 +874,7 @@ func TestRepository_ListSSHCertificateCredentialLibraries_Limits(t *testing.T) { assert, require := assert.New(t), require.New(t) ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, sche, tt.repoOpts...) assert.NoError(err) require.NotNil(repo) got, err := repo.ListSSHCertificateCredentialLibraries(ctx, libs[0].StoreId, tt.listOpts...) 
@@ -1567,7 +1567,7 @@ func TestRepository_UpdateSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1674,7 +1674,7 @@ func TestRepository_UpdateSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1708,7 +1708,7 @@ func TestRepository_UpdateSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1756,7 +1756,7 @@ func TestRepository_UpdateSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1797,7 +1797,7 @@ func TestRepository_UpdateSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1829,7 +1829,7 @@ func TestRepository_UpdateSSHCertificateCredentialLibrary(t *testing.T) { ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -1889,7 +1889,6 @@ func TestRepository_UpdateSSHCertificateCredentialLibrary(t *testing.T) { func TestRepository_DeleteSSHCertificateCredentialLibrary(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1899,7 +1898,7 @@ func TestRepository_DeleteSSHCertificateCredentialLibrary(t *testing.T) { cs := TestCredentialStores(t, conn, wrapper, prj.GetPublicId(), 1)[0] l := TestSSHCertificateCredentialLibraries(t, conn, wrapper, cs.GetPublicId(), 1)[0] - badId, err := newSSHCertificateCredentialLibraryId(ctx) + badId, err := newSSHCertificateCredentialLibraryId() require.NoError(t, err) require.NotNil(t, badId) @@ -1929,9 +1928,10 @@ func TestRepository_DeleteSSHCertificateCredentialLibrary(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) + ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) diff --git a/internal/credential/vault/repository_test.go b/internal/credential/vault/repository_test.go index 82a74bf08d0..cd3b9194d8c 100644 --- a/internal/credential/vault/repository_test.go +++ b/internal/credential/vault/repository_test.go @@ -4,7 +4,6 @@ package 
vault import ( - "context" "testing" "github.com/hashicorp/boundary/internal/db" @@ -130,7 +129,7 @@ func TestRepository_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewRepository(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.args.scheduler, tt.args.opts...) + got, err := NewRepository(tt.args.r, tt.args.w, tt.args.kms, tt.args.scheduler, tt.args.opts...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) diff --git a/internal/credential/vault/rewrapping_test.go b/internal/credential/vault/rewrapping_test.go index 45a2527d83c..94a56ae25c8 100644 --- a/internal/credential/vault/rewrapping_test.go +++ b/internal/credential/vault/rewrapping_test.go @@ -42,7 +42,7 @@ func TestRewrap_credVaultClientCertificateRewrapFn(t *testing.T) { _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) cs := TestCredentialStore(t, conn, wrapper, prj.PublicId, "https://vault.consul.service", "token", "accessor") - cert, err := NewClientCertificate(ctx, []byte(certPem), []byte(keyPem)) + cert, err := NewClientCertificate([]byte(certPem), []byte(keyPem)) assert.NoError(t, err) cert.StoreId = cs.PublicId diff --git a/internal/credential/vault/testing.go b/internal/credential/vault/testing.go index 11665dc010c..7b9a58d3f9b 100644 --- a/internal/credential/vault/testing.go +++ b/internal/credential/vault/testing.go @@ -49,7 +49,7 @@ func TestCredentialStore(t testing.TB, conn *db.DB, wrapper wrapping.Wrapper, pr cs, err := NewCredentialStore(projectId, vaultAddr, []byte(vaultToken), opts...) assert.NoError(t, err) require.NotNil(t, cs) - id, err := newCredentialStoreId(ctx) + id, err := newCredentialStoreId() assert.NoError(t, err) require.NotEmpty(t, id) cs.PublicId = id @@ -92,7 +92,7 @@ func TestCredentialStores(t testing.TB, conn *db.DB, wrapper wrapping.Wrapper, p cs := TestCredentialStore(t, conn, wrapper, projectId, fmt.Sprintf("http://vault%d", i), fmt.Sprintf("vault-token-%s-%d", projectId, i), fmt.Sprintf("accessor-%s-%d", projectId, i)) inCert := testClientCert(t, testCaCert(t)) - clientCert, err := NewClientCertificate(ctx, inCert.Cert.Cert, inCert.Cert.Key) + clientCert, err := NewClientCertificate(inCert.Cert.Cert, inCert.Cert.Key) require.NoError(t, err) require.NotEmpty(t, clientCert) clientCert.StoreId = cs.GetPublicId() @@ -117,7 +117,6 @@ func TestCredentialStores(t testing.TB, conn *db.DB, wrapper wrapping.Wrapper, p // test will fail. func TestCredentialLibraries(t testing.TB, conn *db.DB, _ wrapping.Wrapper, storeId string, count int) []*CredentialLibrary { t.Helper() - ctx := context.Background() assert, require := assert.New(t), require.New(t) w := db.New(conn) var libs []*CredentialLibrary @@ -126,11 +125,12 @@ func TestCredentialLibraries(t testing.TB, conn *db.DB, _ wrapping.Wrapper, stor lib, err := NewCredentialLibrary(storeId, fmt.Sprintf("vault/path%d", i), WithMethod(MethodGet)) assert.NoError(err) require.NotNil(lib) - id, err := newCredentialLibraryId(ctx) + id, err := newCredentialLibraryId() assert.NoError(err) require.NotEmpty(id) lib.PublicId = id + ctx := context.Background() _, err2 := w.DoTx(ctx, db.StdRetryCnt, db.ExpBackoff{}, func(_ db.Reader, iw db.Writer) error { return iw.Create(ctx, lib) @@ -149,7 +149,6 @@ func TestCredentialLibraries(t testing.TB, conn *db.DB, _ wrapping.Wrapper, stor // libraries, the test will fail. 
func TestSSHCertificateCredentialLibraries(t testing.TB, conn *db.DB, _ wrapping.Wrapper, storeId string, count int) []*SSHCertificateCredentialLibrary { t.Helper() - ctx := context.Background() assert, require := assert.New(t), require.New(t) w := db.New(conn) var libs []*SSHCertificateCredentialLibrary @@ -158,11 +157,12 @@ func TestSSHCertificateCredentialLibraries(t testing.TB, conn *db.DB, _ wrapping lib, err := NewSSHCertificateCredentialLibrary(storeId, fmt.Sprintf("ssh/sign/role-%d", i), "username", WithKeyType(KeyTypeEd25519)) assert.NoError(err) require.NotNil(lib) - id, err := newSSHCertificateCredentialLibraryId(ctx) + id, err := newSSHCertificateCredentialLibraryId() assert.NoError(err) require.NotEmpty(id) lib.PublicId = id + ctx := context.Background() _, err2 := w.DoTx(ctx, db.StdRetryCnt, db.ExpBackoff{}, func(_ db.Reader, iw db.Writer) error { return iw.Create(ctx, lib) @@ -186,7 +186,7 @@ func TestCredentials(t testing.TB, conn *db.DB, wrapper wrapping.Wrapper, librar ctx := context.Background() kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sche) + repo, err := NewRepository(rw, rw, kms, sche) assert.NoError(err) require.NotNil(repo) @@ -203,11 +203,11 @@ func TestCredentials(t testing.TB, conn *db.DB, wrapper wrapping.Wrapper, librar var credentials []*Credential for i := 0; i < count; i++ { - credential, err := newCredential(ctx, lib.GetPublicId(), fmt.Sprintf("vault/credential/%d", i), token.GetTokenHmac(), 5*time.Minute) + credential, err := newCredential(lib.GetPublicId(), fmt.Sprintf("vault/credential/%d", i), token.GetTokenHmac(), 5*time.Minute) assert.NoError(err) require.NotNil(credential) - id, err := newCredentialId(ctx) + id, err := newCredentialId() assert.NoError(err) require.NotNil(id) credential.PublicId = id @@ -230,7 +230,7 @@ func createTestToken(t testing.TB, conn *db.DB, wrapper wrapping.Wrapper, projec databaseWrapper, err := kkms.GetWrapper(ctx, projectId, kms.KeyPurposeDatabase) require.NoError(t, err) - inToken, err := newToken(ctx, storeId, []byte(token), []byte(accessor), 5*time.Minute) + inToken, err := newToken(storeId, []byte(token), []byte(accessor), 5*time.Minute) assert.NoError(t, err) require.NotNil(t, inToken) @@ -304,7 +304,7 @@ type testCert struct { func (tc *testCert) ClientCertificate(t testing.TB) *ClientCertificate { t.Helper() - c, err := NewClientCertificate(context.Background(), tc.Cert, tc.Key) + c, err := NewClientCertificate(tc.Cert, tc.Key) require.NoError(t, err) return c } diff --git a/internal/credential/vault/vault_token.go b/internal/credential/vault/vault_token.go index d6ad067e720..8bb1c461b9e 100644 --- a/internal/credential/vault/vault_token.go +++ b/internal/credential/vault/vault_token.go @@ -55,19 +55,19 @@ type Token struct { expiration time.Duration `gorm:"-"` } -func newToken(ctx context.Context, storeId string, token TokenSecret, accessor []byte, expiration time.Duration) (*Token, error) { +func newToken(storeId string, token TokenSecret, accessor []byte, expiration time.Duration) (*Token, error) { const op = "vault.newToken" if storeId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no store id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no store id") } if len(token) == 0 { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no vault token") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no vault token") } if len(accessor) == 0 { - return nil, 
errors.New(ctx, errors.InvalidParameter, op, "no vault token accessor") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no vault token accessor") } if expiration == 0 { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no expiration") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no expiration") } tokenCopy := make(TokenSecret, len(token)) @@ -77,7 +77,7 @@ func newToken(ctx context.Context, storeId string, token TokenSecret, accessor [ hmac, err := crypto.HmacSha256WithPrk(context.Background(), tokenCopy, accessorCopy) if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithCode(errors.Encrypt)) + return nil, errors.WrapDeprecated(err, op, errors.WithCode(errors.Encrypt)) } t := &Token{ expiration: expiration.Round(time.Second), diff --git a/internal/credential/vault/vault_token_test.go b/internal/credential/vault/vault_token_test.go index 88031f46611..9d75ce6d526 100644 --- a/internal/credential/vault/vault_token_test.go +++ b/internal/credential/vault/vault_token_test.go @@ -121,7 +121,7 @@ func TestToken_New(t *testing.T) { require.NoError(err) require.NotNil(databaseWrapper) - got, err := newToken(ctx, tt.args.storeId, tt.args.token, tt.args.accessor, tt.args.expiration) + got, err := newToken(tt.args.storeId, tt.args.token, tt.args.accessor, tt.args.expiration) if tt.wantErr { assert.Error(err) require.Nil(got) diff --git a/internal/daemon/cluster/handlers/worker_service_status_test.go b/internal/daemon/cluster/handlers/worker_service_status_test.go index b13c4025b78..59089d38ffb 100644 --- a/internal/daemon/cluster/handlers/worker_service_status_test.go +++ b/internal/daemon/cluster/handlers/worker_service_status_test.go @@ -46,7 +46,7 @@ func TestStatus(t *testing.T) { iamRepo := iam.TestRepo(t, conn, wrapper) org, prj := iam.TestScopes(t, iamRepo) - serverRepo, _ := server.NewRepository(ctx, rw, rw, kms) + serverRepo, _ := server.NewRepository(rw, rw, kms) serverRepo.UpsertController(ctx, &store.Controller{ PrivateId: "test_controller1", Address: "127.0.0.1", @@ -482,7 +482,7 @@ func TestStatusSessionClosed(t *testing.T) { kms := kms.TestKms(t, conn, wrapper) org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) - serverRepo, _ := server.NewRepository(ctx, rw, rw, kms) + serverRepo, _ := server.NewRepository(rw, rw, kms) serverRepo.UpsertController(ctx, &store.Controller{ PrivateId: "test_controller1", Address: "127.0.0.1", @@ -671,7 +671,7 @@ func TestStatusDeadConnection(t *testing.T) { kms := kms.TestKms(t, conn, wrapper) org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) - serverRepo, _ := server.NewRepository(ctx, rw, rw, kms) + serverRepo, _ := server.NewRepository(rw, rw, kms) serverRepo.UpsertController(ctx, &store.Controller{ PrivateId: "test_controller1", Address: "127.0.0.1", @@ -825,7 +825,7 @@ func TestStatusWorkerWithKeyId(t *testing.T) { kms := kms.TestKms(t, conn, wrapper) org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) - serverRepo, _ := server.NewRepository(ctx, rw, rw, kms) + serverRepo, _ := server.NewRepository(rw, rw, kms) serverRepo.UpsertController(ctx, &store.Controller{ PrivateId: "test_controller1", Address: "127.0.0.1", @@ -1026,7 +1026,7 @@ func TestStatusAuthorizedWorkers(t *testing.T) { err := kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader)) require.NoError(t, err) - serverRepo, _ := server.NewRepository(ctx, rw, rw, kmsCache) + serverRepo, _ := server.NewRepository(rw, rw, kmsCache) 
serverRepo.UpsertController(ctx, &store.Controller{ PrivateId: "test_controller1", Address: "127.0.0.1", @@ -1241,7 +1241,7 @@ func TestWorkerOperationalStatus(t *testing.T) { wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - serverRepo, _ := server.NewRepository(ctx, rw, rw, kms) + serverRepo, _ := server.NewRepository(rw, rw, kms) serverRepo.UpsertController(ctx, &store.Controller{ PrivateId: "test_controller1", Address: "127.0.0.1", diff --git a/internal/daemon/cluster/handlers/worker_service_test.go b/internal/daemon/cluster/handlers/worker_service_test.go index 2ed7d73f916..cb1f7730e1d 100644 --- a/internal/daemon/cluster/handlers/worker_service_test.go +++ b/internal/daemon/cluster/handlers/worker_service_test.go @@ -54,7 +54,7 @@ func TestLookupSession(t *testing.T) { org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } workerAuthRepoFn := func() (*server.WorkerAuthRepositoryStorage, error) { return server.NewRepositoryStorage(ctx, rw, rw, kms) @@ -313,7 +313,7 @@ func TestAuthorizeConnection(t *testing.T) { org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kmsCache) + return server.NewRepository(rw, rw, kmsCache) } workerAuthRepoFn := func() (*server.WorkerAuthRepositoryStorage, error) { return server.NewRepositoryStorage(ctx, rw, rw, kmsCache) @@ -469,7 +469,7 @@ func TestCancelSession(t *testing.T) { org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } workerAuthRepoFn := func() (*server.WorkerAuthRepositoryStorage, error) { return server.NewRepositoryStorage(ctx, rw, rw, kms) @@ -568,7 +568,7 @@ func TestHcpbWorkers(t *testing.T) { require.NoError(kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader))) serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kmsCache) + return server.NewRepository(rw, rw, kmsCache) } workerAuthRepoFn := func() (*server.WorkerAuthRepositoryStorage, error) { return server.NewRepositoryStorage(ctx, rw, rw, kmsCache) diff --git a/internal/daemon/common/handler.go b/internal/daemon/common/handler.go index 967c08c2eb2..f4b747ad868 100644 --- a/internal/daemon/common/handler.go +++ b/internal/daemon/common/handler.go @@ -106,16 +106,16 @@ func WrapWithOptionals(ctx context.Context, with *writerWrapper, wrap http.Respo // WrapWithEventsHandler will wrap the provided http.Handler with a // handler that adds an Eventer to the request context and starts/flushes gated // events of type: observation and audit -func WrapWithEventsHandler(ctx context.Context, h http.Handler, e *event.Eventer, kms *kms.Kms, listenerCfg *listenerutil.ListenerConfig) (http.Handler, error) { +func WrapWithEventsHandler(h http.Handler, e *event.Eventer, kms *kms.Kms, listenerCfg *listenerutil.ListenerConfig) (http.Handler, error) { const op = "common.WrapWithEventsHandler" if h == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing handler") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing handler") } if e == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing eventer") + return nil, 
errors.NewDeprecated(errors.InvalidParameter, op, "missing eventer") } if kms == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing kms") } return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() diff --git a/internal/daemon/common/handler_test.go b/internal/daemon/common/handler_test.go index b512834d4ed..e9b682706fc 100644 --- a/internal/daemon/common/handler_test.go +++ b/internal/daemon/common/handler_test.go @@ -264,7 +264,7 @@ func Test_WrapWithEventsHandler(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := WrapWithEventsHandler(context.Background(), tt.h, tt.e, tt.kms, testListenerCfg) + got, err := WrapWithEventsHandler(tt.h, tt.e, tt.kms, testListenerCfg) if tt.wantErrMatch != nil { require.Error(err) assert.Nil(got) diff --git a/internal/daemon/controller/auth/auth.go b/internal/daemon/controller/auth/auth.go index 72124c27fda..1a058f76c33 100644 --- a/internal/daemon/controller/auth/auth.go +++ b/internal/daemon/controller/auth/auth.go @@ -656,7 +656,6 @@ func (v verifier) performAuthCheck(ctx context.Context) ( permsOpts = append(permsOpts, perms.WithAccountId(*userData.Account.Id)) } parsed, err := perms.Parse( - ctx, pair.ScopeId, pair.Grant, permsOpts...) diff --git a/internal/daemon/controller/auth/auth_test.go b/internal/daemon/controller/auth/auth_test.go index afee9822a6f..98b17a70335 100644 --- a/internal/daemon/controller/auth/auth_test.go +++ b/internal/daemon/controller/auth/auth_test.go @@ -29,12 +29,11 @@ import ( ) func TestAuthTokenAuthenticator(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - tokenRepo, err := authtoken.NewRepository(ctx, rw, rw, kms) + tokenRepo, err := authtoken.NewRepository(rw, rw, kms) require.NoError(t, err) iamRepo := iam.TestRepo(t, conn, wrapper) tokenRepoFn := func() (*authtoken.Repository, error) { @@ -44,7 +43,7 @@ func TestAuthTokenAuthenticator(t *testing.T) { return iamRepo, nil } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } o, _ := iam.TestScopes(t, iamRepo) @@ -145,7 +144,6 @@ func TestAuthTokenAuthenticator(t *testing.T) { } func TestVerify_AuditEvent(t *testing.T) { - ctx := context.Background() eventConfig := event.TestEventerConfig(t, "Test_Verify", event.TestWithAuditSink(t)) testLock := &sync.Mutex{} testLogger := hclog.New(&hclog.LoggerOptions{ @@ -158,7 +156,7 @@ func TestVerify_AuditEvent(t *testing.T) { rw := db.New(conn) wrapper := db.TestWrapper(t) testKms := kms.TestKms(t, conn, wrapper) - tokenRepo, err := authtoken.NewRepository(ctx, rw, rw, testKms) + tokenRepo, err := authtoken.NewRepository(rw, rw, testKms) require.NoError(t, err) iamRepo := iam.TestRepo(t, conn, wrapper) tokenRepoFn := func() (*authtoken.Repository, error) { @@ -168,7 +166,7 @@ func TestVerify_AuditEvent(t *testing.T) { return iamRepo, nil } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, testKms) + return server.NewRepository(rw, rw, testKms) } o, _ := iam.TestScopes(t, iamRepo) @@ -239,7 +237,7 @@ func TestVerify_AuditEvent(t *testing.T) { } requestInfo.PublicId, requestInfo.EncryptedToken, requestInfo.TokenFormat = 
GetTokenFromRequest(context.TODO(), testKms, req) - ctx := NewVerifierContext(ctx, iamRepoFn, tokenRepoFn, serversRepoFn, testKms, &requestInfo) + ctx := NewVerifierContext(context.Background(), iamRepoFn, tokenRepoFn, serversRepoFn, testKms, &requestInfo) _ = os.WriteFile(eventConfig.AuditEvents.Name(), nil, 0o666) // clean out audit events from previous calls _ = Verify(ctx, tt.opt...) diff --git a/internal/daemon/controller/common/scopeids/scope_ids_test.go b/internal/daemon/controller/common/scopeids/scope_ids_test.go index a7fbf8da8b4..663ff9935d2 100644 --- a/internal/daemon/controller/common/scopeids/scope_ids_test.go +++ b/internal/daemon/controller/common/scopeids/scope_ids_test.go @@ -30,7 +30,6 @@ import ( // behavior that used role permissions instead of the resource type under test // is fixed. func TestListingScopeIds(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrap := db.TestWrapper(t) @@ -40,18 +39,19 @@ func TestListingScopeIds(t *testing.T) { return iamRepo, nil } authTokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } - s, err := groups.NewService(ctx, iamRepoFn) + s, err := groups.NewService(iamRepoFn) require.NoError(t, err) + ctx := context.Background() sessionsRepoFn := func(opt ...session.Option) (*session.Repository, error) { return session.NewRepository(ctx, rw, rw, kms, opt...) } - sess, err := sessions.NewService(ctx, sessionsRepoFn, iamRepoFn) + sess, err := sessions.NewService(sessionsRepoFn, iamRepoFn) require.NoError(t, err) tcs := []struct { diff --git a/internal/daemon/controller/controller.go b/internal/daemon/controller/controller.go index 2b6e7ff9410..7e6b2d09ae7 100644 --- a/internal/daemon/controller/controller.go +++ b/internal/daemon/controller/controller.go @@ -186,7 +186,7 @@ func New(ctx context.Context, conf *Config) (*Controller, error) { conf.RawConfig.Controller = new(config.Controller) } - if err := conf.RawConfig.Controller.InitNameIfEmpty(ctx); err != nil { + if err := conf.RawConfig.Controller.InitNameIfEmpty(); err != nil { return nil, fmt.Errorf("error auto-generating controller name: %w", err) } @@ -327,7 +327,7 @@ func New(ctx context.Context, conf *Config) (*Controller, error) { } // we need to get all the scopes so we can reconcile the DEKs for each scope. 
- iamRepo, err := iam.NewRepository(ctx, dbase, dbase, c.kms, iam.WithRandomReader(c.conf.SecureRandomReader)) + iamRepo, err := iam.NewRepository(dbase, dbase, c.kms, iam.WithRandomReader(c.conf.SecureRandomReader)) if err != nil { return nil, fmt.Errorf("unable to initialize iam repository: %w", err) } @@ -354,7 +354,7 @@ func New(ctx context.Context, conf *Config) (*Controller, error) { return nil, fmt.Errorf("error rotating eventer audit wrapper: %w", err) } jobRepoFn := func() (*job.Repository, error) { - return job.NewRepository(ctx, dbase, dbase, c.kms) + return job.NewRepository(dbase, dbase, c.kms) } // TODO: Allow setting run jobs limit from config schedulerOpts := []scheduler.Option{scheduler.WithRunJobsLimit(-1)} @@ -365,18 +365,18 @@ func New(ctx context.Context, conf *Config) (*Controller, error) { schedulerOpts = append(schedulerOpts, scheduler.WithMonitorInterval(c.conf.RawConfig.Controller.Scheduler.MonitorIntervalDuration)) } - c.scheduler, err = scheduler.New(ctx, c.conf.RawConfig.Controller.Name, jobRepoFn, schedulerOpts...) + c.scheduler, err = scheduler.New(c.conf.RawConfig.Controller.Name, jobRepoFn, schedulerOpts...) if err != nil { return nil, fmt.Errorf("error creating new scheduler: %w", err) } c.IamRepoFn = func() (*iam.Repository, error) { - return iam.NewRepository(ctx, dbase, dbase, c.kms, iam.WithRandomReader(c.conf.SecureRandomReader)) + return iam.NewRepository(dbase, dbase, c.kms, iam.WithRandomReader(c.conf.SecureRandomReader)) } c.StaticHostRepoFn = func() (*static.Repository, error) { - return static.NewRepository(ctx, dbase, dbase, c.kms) + return static.NewRepository(dbase, dbase, c.kms) } c.PluginHostRepoFn = func() (*pluginhost.Repository, error) { - return pluginhost.NewRepository(ctx, dbase, dbase, c.kms, c.scheduler, c.conf.HostPlugins) + return pluginhost.NewRepository(dbase, dbase, c.kms, c.scheduler, c.conf.HostPlugins) } c.PluginRepoFn = func() (*plugin.Repository, error) { return plugin.NewRepository(ctx, dbase, dbase, c.kms) @@ -385,18 +385,18 @@ func New(ctx context.Context, conf *Config) (*Controller, error) { return pluginstorage.NewRepository(ctx, dbase, dbase, c.kms) } c.AuthTokenRepoFn = func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, dbase, dbase, c.kms, + return authtoken.NewRepository(dbase, dbase, c.kms, authtoken.WithTokenTimeToLiveDuration(c.conf.RawConfig.Controller.AuthTokenTimeToLiveDuration), authtoken.WithTokenTimeToStaleDuration(c.conf.RawConfig.Controller.AuthTokenTimeToStaleDuration)) } c.VaultCredentialRepoFn = func() (*vault.Repository, error) { - return vault.NewRepository(ctx, dbase, dbase, c.kms, c.scheduler) + return vault.NewRepository(dbase, dbase, c.kms, c.scheduler) } c.StaticCredentialRepoFn = func() (*credstatic.Repository, error) { return credstatic.NewRepository(ctx, dbase, dbase, c.kms) } c.ServersRepoFn = func() (*server.Repository, error) { - return server.NewRepository(ctx, dbase, dbase, c.kms) + return server.NewRepository(dbase, dbase, c.kms) } c.OidcRepoFn = func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, dbase, dbase, c.kms) @@ -405,7 +405,7 @@ func New(ctx context.Context, conf *Config) (*Controller, error) { return ldap.NewRepository(ctx, dbase, dbase, c.kms) } c.PasswordAuthRepoFn = func() (*password.Repository, error) { - return password.NewRepository(ctx, dbase, dbase, c.kms) + return password.NewRepository(dbase, dbase, c.kms) } c.TargetRepoFn = func(o ...target.Option) (*target.Repository, error) { return target.NewRepository(ctx, dbase, dbase, 
c.kms, o...) diff --git a/internal/daemon/controller/gateway.go b/internal/daemon/controller/gateway.go index ddbe51b0bf7..412c542c55d 100644 --- a/internal/daemon/controller/gateway.go +++ b/internal/daemon/controller/gateway.go @@ -79,7 +79,7 @@ func newGrpcServer( eventer *event.Eventer, ) (*grpc.Server, string, error) { const op = "controller.newGrpcServer" - ticket, err := db.NewPrivateId(ctx, "gwticket") + ticket, err := db.NewPrivateId("gwticket") if err != nil { return nil, "", errors.Wrap(ctx, err, op, errors.WithMsg("unable to generate gateway ticket")) } diff --git a/internal/daemon/controller/handler.go b/internal/daemon/controller/handler.go index e0ae4b8c722..325e5c2c45c 100644 --- a/internal/daemon/controller/handler.go +++ b/internal/daemon/controller/handler.go @@ -97,7 +97,7 @@ func (c *Controller) apiHandler(props HandlerProperties) (http.Handler, error) { commonWrappedHandler := wrapHandlerWithCommonFuncs(corsWrappedHandler, c, props) callbackInterceptingHandler := wrapHandlerWithCallbackInterceptor(commonWrappedHandler, c) printablePathCheckHandler := cleanhttp.PrintablePathCheckHandler(callbackInterceptingHandler, nil) - eventsHandler, err := common.WrapWithEventsHandler(c.baseContext, printablePathCheckHandler, c.conf.Eventer, c.kms, props.ListenerConfig) + eventsHandler, err := common.WrapWithEventsHandler(printablePathCheckHandler, c.conf.Eventer, c.kms, props.ListenerConfig) if err != nil { return nil, err } @@ -122,7 +122,7 @@ func (c *Controller) GetHealthHandler(lcfg *listenerutil.ListenerConfig) (http.H } wrapped := wrapHandlerWithCommonFuncs(healthGrpcGwMux, c, HandlerProperties{lcfg, c.baseContext}) - return common.WrapWithEventsHandler(c.baseContext, wrapped, c.conf.Eventer, c.kms, lcfg) + return common.WrapWithEventsHandler(wrapped, c.conf.Eventer, c.kms, lcfg) } func registerHealthGrpcGatewayEndpoint(ctx context.Context, gwMux *runtime.ServeMux, dialOptions ...grpc.DialOption) error { @@ -135,21 +135,21 @@ func (c *Controller) registerGrpcServices(s *grpc.Server) error { currentServices := s.GetServiceInfo() if _, ok := currentServices[services.HostCatalogService_ServiceDesc.ServiceName]; !ok { - hcs, err := host_catalogs.NewService(c.baseContext, c.StaticHostRepoFn, c.PluginHostRepoFn, c.PluginRepoFn, c.IamRepoFn) + hcs, err := host_catalogs.NewService(c.StaticHostRepoFn, c.PluginHostRepoFn, c.PluginRepoFn, c.IamRepoFn) if err != nil { return fmt.Errorf("failed to create host catalog handler service: %w", err) } services.RegisterHostCatalogServiceServer(s, hcs) } if _, ok := currentServices[services.HostSetService_ServiceDesc.ServiceName]; !ok { - hss, err := host_sets.NewService(c.baseContext, c.StaticHostRepoFn, c.PluginHostRepoFn) + hss, err := host_sets.NewService(c.StaticHostRepoFn, c.PluginHostRepoFn) if err != nil { return fmt.Errorf("failed to create host set handler service: %w", err) } services.RegisterHostSetServiceServer(s, hss) } if _, ok := currentServices[services.HostService_ServiceDesc.ServiceName]; !ok { - hs, err := hosts.NewService(c.baseContext, c.StaticHostRepoFn, c.PluginHostRepoFn) + hs, err := hosts.NewService(c.StaticHostRepoFn, c.PluginHostRepoFn) if err != nil { return fmt.Errorf("failed to create host handler service: %w", err) } @@ -163,14 +163,14 @@ func (c *Controller) registerGrpcServices(s *grpc.Server) error { services.RegisterAccountServiceServer(s, accts) } if _, ok := currentServices[services.AuthMethodService_ServiceDesc.ServiceName]; !ok { - authMethods, err := authmethods.NewService(c.baseContext, c.kms, 
c.PasswordAuthRepoFn, c.OidcRepoFn, c.IamRepoFn, c.AuthTokenRepoFn, c.LdapRepoFn) + authMethods, err := authmethods.NewService(c.kms, c.PasswordAuthRepoFn, c.OidcRepoFn, c.IamRepoFn, c.AuthTokenRepoFn, c.LdapRepoFn) if err != nil { return fmt.Errorf("failed to create auth method handler service: %w", err) } services.RegisterAuthMethodServiceServer(s, authMethods) } if _, ok := currentServices[services.AuthTokenService_ServiceDesc.ServiceName]; !ok { - authtoks, err := authtokens.NewService(c.baseContext, c.AuthTokenRepoFn, c.IamRepoFn) + authtoks, err := authtokens.NewService(c.AuthTokenRepoFn, c.IamRepoFn) if err != nil { return fmt.Errorf("failed to create auth token handler service: %w", err) } @@ -184,7 +184,7 @@ func (c *Controller) registerGrpcServices(s *grpc.Server) error { services.RegisterScopeServiceServer(s, os) } if _, ok := currentServices[services.UserService_ServiceDesc.ServiceName]; !ok { - us, err := users.NewService(c.baseContext, c.IamRepoFn) + us, err := users.NewService(c.IamRepoFn) if err != nil { return fmt.Errorf("failed to create user handler service: %w", err) } @@ -236,21 +236,21 @@ func (c *Controller) registerGrpcServices(s *grpc.Server) error { services.RegisterTargetServiceServer(s, ts) } if _, ok := currentServices[services.GroupService_ServiceDesc.ServiceName]; !ok { - gs, err := groups.NewService(c.baseContext, c.IamRepoFn) + gs, err := groups.NewService(c.IamRepoFn) if err != nil { return fmt.Errorf("failed to create group handler service: %w", err) } services.RegisterGroupServiceServer(s, gs) } if _, ok := currentServices[services.RoleService_ServiceDesc.ServiceName]; !ok { - rs, err := roles.NewService(c.baseContext, c.IamRepoFn) + rs, err := roles.NewService(c.IamRepoFn) if err != nil { return fmt.Errorf("failed to create role handler service: %w", err) } services.RegisterRoleServiceServer(s, rs) } if _, ok := currentServices[services.SessionService_ServiceDesc.ServiceName]; !ok { - ss, err := sessions.NewService(c.baseContext, c.SessionRepoFn, c.IamRepoFn) + ss, err := sessions.NewService(c.SessionRepoFn, c.IamRepoFn) if err != nil { return fmt.Errorf("failed to create session handler service: %w", err) } @@ -271,7 +271,7 @@ func (c *Controller) registerGrpcServices(s *grpc.Server) error { services.RegisterCredentialStoreServiceServer(s, cs) } if _, ok := currentServices[services.CredentialLibraryService_ServiceDesc.ServiceName]; !ok { - cl, err := credentiallibraries.NewService(c.baseContext, c.VaultCredentialRepoFn, c.IamRepoFn) + cl, err := credentiallibraries.NewService(c.VaultCredentialRepoFn, c.IamRepoFn) if err != nil { return fmt.Errorf("failed to create credential library handler service: %w", err) } @@ -286,7 +286,7 @@ func (c *Controller) registerGrpcServices(s *grpc.Server) error { services.RegisterWorkerServiceServer(s, ws) } if _, ok := currentServices[services.CredentialService_ServiceDesc.ServiceName]; !ok { - c, err := credentials.NewService(c.baseContext, c.StaticCredentialRepoFn, c.IamRepoFn) + c, err := credentials.NewService(c.StaticCredentialRepoFn, c.IamRepoFn) if err != nil { return fmt.Errorf("failed to create credential handler service: %w", err) } diff --git a/internal/daemon/controller/handlers/accounts/account_service.go b/internal/daemon/controller/handlers/accounts/account_service.go index 40ac744ed8b..305b5b6f801 100644 --- a/internal/daemon/controller/handlers/accounts/account_service.go +++ b/internal/daemon/controller/handlers/accounts/account_service.go @@ -102,10 +102,10 @@ var ( func init() { var err error - if 
pwMaskManager, err = handlers.NewMaskManager(context.Background(), handlers.MaskDestination{&pwstore.Account{}}, handlers.MaskSource{&pb.Account{}, &pb.PasswordAccountAttributes{}}); err != nil { + if pwMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&pwstore.Account{}}, handlers.MaskSource{&pb.Account{}, &pb.PasswordAccountAttributes{}}); err != nil { panic(err) } - if oidcMaskManager, err = handlers.NewMaskManager(context.Background(), handlers.MaskDestination{&oidcstore.Account{}}, handlers.MaskSource{&pb.Account{}, &pb.OidcAccountAttributes{}}); err != nil { + if oidcMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&oidcstore.Account{}}, handlers.MaskSource{&pb.Account{}, &pb.OidcAccountAttributes{}}); err != nil { panic(err) } } @@ -137,7 +137,7 @@ func NewService(ctx context.Context, pwRepo common.PasswordAuthRepoFactory, oidc // ListAccounts implements the interface pbs.AccountServiceServer. func (s Service) ListAccounts(ctx context.Context, req *pbs.ListAccountsRequest) (*pbs.ListAccountsResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } _, authResults := s.parentAndAuthResult(ctx, req.GetAuthMethodId(), action.List) @@ -152,7 +152,7 @@ func (s Service) ListAccounts(ctx context.Context, req *pbs.ListAccountsRequest) return &pbs.ListAccountsResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -202,7 +202,7 @@ func (s Service) ListAccounts(ctx context.Context, req *pbs.ListAccountsRequest) func (s Service) GetAccount(ctx context.Context, req *pbs.GetAccountRequest) (*pbs.GetAccountResponse, error) { const op = "accounts.(Service).GetAccount" - if err := validateGetRequest(ctx, req); err != nil { + if err := validateGetRequest(req); err != nil { return nil, err } @@ -244,7 +244,7 @@ func (s Service) GetAccount(ctx context.Context, req *pbs.GetAccountRequest) (*p func (s Service) CreateAccount(ctx context.Context, req *pbs.CreateAccountRequest) (*pbs.CreateAccountResponse, error) { const op = "accounts.(Service).CreateAccount" - if err := validateCreateRequest(ctx, req); err != nil { + if err := validateCreateRequest(req); err != nil { return nil, err } @@ -283,7 +283,7 @@ func (s Service) CreateAccount(ctx context.Context, req *pbs.CreateAccountReques func (s Service) UpdateAccount(ctx context.Context, req *pbs.UpdateAccountRequest) (*pbs.UpdateAccountResponse, error) { const op = "accounts.(Service).UpdateAccount" - if err := validateUpdateRequest(ctx, req); err != nil { + if err := validateUpdateRequest(req); err != nil { return nil, err } @@ -320,7 +320,7 @@ func (s Service) UpdateAccount(ctx context.Context, req *pbs.UpdateAccountReques // DeleteAccount implements the interface pbs.AccountServiceServer. 
func (s Service) DeleteAccount(ctx context.Context, req *pbs.DeleteAccountRequest) (*pbs.DeleteAccountResponse, error) { - if err := validateDeleteRequest(ctx, req); err != nil { + if err := validateDeleteRequest(req); err != nil { return nil, err } _, authResults := s.parentAndAuthResult(ctx, req.GetId(), action.Delete) @@ -338,7 +338,7 @@ func (s Service) DeleteAccount(ctx context.Context, req *pbs.DeleteAccountReques func (s Service) ChangePassword(ctx context.Context, req *pbs.ChangePasswordRequest) (*pbs.ChangePasswordResponse, error) { const op = "accounts.(Service).ChangePassword" - if err := validateChangePasswordRequest(ctx, req); err != nil { + if err := validateChangePasswordRequest(req); err != nil { return nil, err } @@ -377,7 +377,7 @@ func (s Service) ChangePassword(ctx context.Context, req *pbs.ChangePasswordRequ func (s Service) SetPassword(ctx context.Context, req *pbs.SetPasswordRequest) (*pbs.SetPasswordResponse, error) { const op = "accounts.(Service).SetPassword" - if err := validateSetPasswordRequest(ctx, req); err != nil { + if err := validateSetPasswordRequest(req); err != nil { return nil, err } @@ -493,7 +493,7 @@ func (s Service) createPwInRepo(ctx context.Context, am auth.AuthMethod, item *p if item.GetDescription() != nil { opts = append(opts, password.WithDescription(item.GetDescription().GetValue())) } - a, err := password.NewAccount(ctx, am.GetPublicId(), opts...) + a, err := password.NewAccount(am.GetPublicId(), opts...) if err != nil { return nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build account for creation: %v.", err) } @@ -627,7 +627,7 @@ func (s Service) createInRepo(ctx context.Context, am auth.AuthMethod, item *pb. func (s Service) updatePwInRepo(ctx context.Context, scopeId, authMethId, id string, mask []string, item *pb.Account) (*password.Account, error) { const op = "accounts.(Service).updatePwInRepo" - u, err := toStoragePwAccount(ctx, authMethId, item) + u, err := toStoragePwAccount(authMethId, item) if err != nil { return nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build account for update: %v.", err) } @@ -1127,10 +1127,10 @@ func toProto(ctx context.Context, in auth.Account, opt ...handlers.Option) (*pb. return &out, nil } -func toStoragePwAccount(ctx context.Context, amId string, item *pb.Account) (*password.Account, error) { +func toStoragePwAccount(amId string, item *pb.Account) (*password.Account, error) { const op = "accounts.toStoragePwAccount" if item == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil account.") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil account.") } var opts []password.Option if item.GetName() != nil { @@ -1139,7 +1139,7 @@ func toStoragePwAccount(ctx context.Context, amId string, item *pb.Account) (*pa if item.GetDescription() != nil { opts = append(opts, password.WithDescription(item.GetDescription().GetValue())) } - u, err := password.NewAccount(ctx, amId, opts...) + u, err := password.NewAccount(amId, opts...) 
if err != nil { return nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build account for creation: %v.", err) } @@ -1156,18 +1156,18 @@ func toStoragePwAccount(ctx context.Context, amId string, item *pb.Account) (*pa // - The path passed in is correctly formatted // - All required parameters are set // - There are no conflicting parameters provided -func validateGetRequest(ctx context.Context, req *pbs.GetAccountRequest) error { +func validateGetRequest(req *pbs.GetAccountRequest) error { const op = "accounts.validateGetRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } return handlers.ValidateGetRequest(handlers.NoopValidatorFn, req, globals.PasswordAccountPreviousPrefix, globals.PasswordAccountPrefix, globals.OidcAccountPrefix, globals.LdapAccountPrefix) } -func validateCreateRequest(ctx context.Context, req *pbs.CreateAccountRequest) error { +func validateCreateRequest(req *pbs.CreateAccountRequest) error { const op = "accounts.validateCreateRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } return handlers.ValidateCreateRequest(req.GetItem(), func() map[string]string { badFields := map[string]string{} @@ -1248,10 +1248,10 @@ func validateCreateRequest(ctx context.Context, req *pbs.CreateAccountRequest) e }) } -func validateUpdateRequest(ctx context.Context, req *pbs.UpdateAccountRequest) error { +func validateUpdateRequest(req *pbs.UpdateAccountRequest) error { const op = "accounts.validateUpdateRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } return handlers.ValidateUpdateRequest(req, req.GetItem(), func() map[string]string { badFields := map[string]string{} @@ -1300,24 +1300,24 @@ func validateUpdateRequest(ctx context.Context, req *pbs.UpdateAccountRequest) e }, globals.PasswordAccountPreviousPrefix, globals.PasswordAccountPrefix, globals.OidcAccountPrefix, globals.LdapAccountPrefix) } -func validateDeleteRequest(ctx context.Context, req *pbs.DeleteAccountRequest) error { +func validateDeleteRequest(req *pbs.DeleteAccountRequest) error { const op = "accounts.validateDeleteRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.PasswordAccountPreviousPrefix, globals.PasswordAccountPrefix, globals.OidcAccountPrefix, globals.LdapAccountPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListAccountsRequest) error { +func validateListRequest(req *pbs.ListAccountsRequest) error { const op = "accounts.validateListRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetAuthMethodId()), globals.PasswordAuthMethodPrefix, globals.OidcAuthMethodPrefix, globals.LdapAuthMethodPrefix) { badFields[authMethodIdField] = "Invalid formatted identifier." 
} - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields[filterField] = fmt.Sprintf("This field could not be parsed. %v", err) } if len(badFields) > 0 { @@ -1326,10 +1326,10 @@ func validateListRequest(ctx context.Context, req *pbs.ListAccountsRequest) erro return nil } -func validateChangePasswordRequest(ctx context.Context, req *pbs.ChangePasswordRequest) error { +func validateChangePasswordRequest(req *pbs.ChangePasswordRequest) error { const op = "accounts.validateChangePasswordRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetId()), globals.PasswordAccountPreviousPrefix, globals.PasswordAccountPrefix) { @@ -1350,10 +1350,10 @@ func validateChangePasswordRequest(ctx context.Context, req *pbs.ChangePasswordR return nil } -func validateSetPasswordRequest(ctx context.Context, req *pbs.SetPasswordRequest) error { +func validateSetPasswordRequest(req *pbs.SetPasswordRequest) error { const op = "accounts.validateSetPasswordRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetId()), globals.PasswordAccountPreviousPrefix, globals.PasswordAccountPrefix) { diff --git a/internal/daemon/controller/handlers/accounts/account_service_test.go b/internal/daemon/controller/handlers/accounts/account_service_test.go index f68045e17c1..43efa922e67 100644 --- a/internal/daemon/controller/handlers/accounts/account_service_test.go +++ b/internal/daemon/controller/handlers/accounts/account_service_test.go @@ -70,7 +70,7 @@ func TestNewService(t *testing.T) { wrap := db.TestWrapper(t) kmsCache := kms.TestKms(t, conn, wrap) pwRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kmsCache) + return password.NewRepository(rw, rw, kmsCache) } oidcRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kmsCache) @@ -124,13 +124,13 @@ func TestGet(t *testing.T) { wrap := db.TestWrapper(t) kmsCache := kms.TestKms(t, conn, wrap) pwRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kmsCache) + return password.NewRepository(rw, rw, kmsCache) } oidcRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -310,13 +310,13 @@ func TestListPassword(t *testing.T) { wrap := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrap) pwRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kms) + return password.NewRepository(rw, rw, kms) } oidcRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kms) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kms) + return iam.NewRepository(rw, rw, kms) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kms) @@ -451,13 +451,13 @@ func TestListOidc(t *testing.T) { wrap := db.TestWrapper(t) kmsCache 
:= kms.TestKms(t, conn, wrap) pwRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kmsCache) + return password.NewRepository(rw, rw, kmsCache) } oidcRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -611,13 +611,13 @@ func TestListLdap(t *testing.T) { wrap := db.TestWrapper(t) kmsCache := kms.TestKms(t, conn, wrap) pwRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kmsCache) + return password.NewRepository(rw, rw, kmsCache) } oidcRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -766,13 +766,13 @@ func TestDelete(t *testing.T) { wrap := db.TestWrapper(t) kmsCache := kms.TestKms(t, conn, wrap) pwRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kmsCache) + return password.NewRepository(rw, rw, kmsCache) } oidcRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -889,13 +889,13 @@ func TestDelete_twice(t *testing.T) { rw := db.New(conn) kms := kms.TestKms(t, conn, wrap) pwRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kms) + return password.NewRepository(rw, rw, kms) } oidcRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kms) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kms) + return iam.NewRepository(rw, rw, kms) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kms) @@ -924,13 +924,13 @@ func TestCreatePassword(t *testing.T) { wrap := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrap) pwRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kms) + return password.NewRepository(rw, rw, kms) } oidcRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kms) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kms) + return iam.NewRepository(rw, rw, kms) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kms) @@ -1171,13 +1171,13 @@ func TestCreateOidc(t *testing.T) { wrap := db.TestWrapper(t) kmsCache := kms.TestKms(t, conn, wrap) pwRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kmsCache) + return password.NewRepository(rw, rw, kmsCache) } oidcRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, 
@@ -1408,13 +1408,13 @@ func TestCreateLdap(t *testing.T) {
wrap := db.TestWrapper(t)
kmsCache := kms.TestKms(t, conn, wrap)
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kmsCache)
+ return password.NewRepository(rw, rw, kmsCache)
}
oidcRepoFn := func() (*oidc.Repository, error) {
return oidc.NewRepository(ctx, rw, rw, kmsCache)
}
iamRepoFn := func() (*iam.Repository, error) {
- return iam.NewRepository(ctx, rw, rw, kmsCache)
+ return iam.NewRepository(rw, rw, kmsCache)
}
ldapRepoFn := func() (*ldap.Repository, error) {
return ldap.NewRepository(ctx, rw, rw, kmsCache)
@@ -1687,13 +1687,13 @@ func TestUpdatePassword(t *testing.T) {
wrap := db.TestWrapper(t)
kms := kms.TestKms(t, conn, wrap)
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kms)
+ return password.NewRepository(rw, rw, kms)
}
oidcRepoFn := func() (*oidc.Repository, error) {
return oidc.NewRepository(ctx, rw, rw, kms)
}
iamRepoFn := func() (*iam.Repository, error) {
- return iam.NewRepository(ctx, rw, rw, kms)
+ return iam.NewRepository(rw, rw, kms)
}
ldapRepoFn := func() (*ldap.Repository, error) {
return ldap.NewRepository(ctx, rw, rw, kms)
@@ -2077,13 +2077,13 @@ func TestUpdateOidc(t *testing.T) {
wrap := db.TestWrapper(t)
kmsCache := kms.TestKms(t, conn, wrap)
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kmsCache)
+ return password.NewRepository(rw, rw, kmsCache)
}
oidcRepoFn := func() (*oidc.Repository, error) {
return oidc.NewRepository(ctx, rw, rw, kmsCache)
}
iamRepoFn := func() (*iam.Repository, error) {
- return iam.NewRepository(ctx, rw, rw, kmsCache)
+ return iam.NewRepository(rw, rw, kmsCache)
}
ldapRepoFn := func() (*ldap.Repository, error) {
return ldap.NewRepository(ctx, rw, rw, kmsCache)
@@ -2467,13 +2467,13 @@ func TestUpdateLdap(t *testing.T) {
wrap := db.TestWrapper(t)
kmsCache := kms.TestKms(t, conn, wrap)
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kmsCache)
+ return password.NewRepository(rw, rw, kmsCache)
}
oidcRepoFn := func() (*oidc.Repository, error) {
return oidc.NewRepository(ctx, rw, rw, kmsCache)
}
iamRepoFn := func() (*iam.Repository, error) {
- return iam.NewRepository(ctx, rw, rw, kmsCache)
+ return iam.NewRepository(rw, rw, kmsCache)
}
ldapRepoFn := func() (*ldap.Repository, error) {
return ldap.NewRepository(ctx, rw, rw, kmsCache)
@@ -2841,13 +2841,13 @@ func TestSetPassword(t *testing.T) {
wrap := db.TestWrapper(t)
kms := kms.TestKms(t, conn, wrap)
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kms)
+ return password.NewRepository(rw, rw, kms)
}
oidcRepoFn := func() (*oidc.Repository, error) {
return oidc.NewRepository(ctx, rw, rw, kms)
}
iamRepoFn := func() (*iam.Repository, error) {
- return iam.NewRepository(ctx, rw, rw, kms)
+ return iam.NewRepository(rw, rw, kms)
}
ldapRepoFn := func() (*ldap.Repository, error) {
return ldap.NewRepository(ctx, rw, rw, kms)
@@ -2983,13 +2983,13 @@ func TestChangePassword(t *testing.T) {
wrap := db.TestWrapper(t)
kms := kms.TestKms(t, conn, wrap)
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kms)
+ return password.NewRepository(rw, rw, kms)
}
oidcRepoFn := func() (*oidc.Repository, error) {
return oidc.NewRepository(ctx, rw, rw, kms)
}
iamRepoFn := func() (*iam.Repository, error) {
- return iam.NewRepository(ctx, rw, rw, kms)
+ return iam.NewRepository(rw, rw, kms)
}
ldapRepoFn := func() (*ldap.Repository, error) {
return ldap.NewRepository(ctx, rw, rw, kms)
diff --git a/internal/daemon/controller/handlers/accounts/validate_test.go b/internal/daemon/controller/handlers/accounts/validate_test.go
index 6bec8e121d8..966b27304fa 100644
--- a/internal/daemon/controller/handlers/accounts/validate_test.go
+++ b/internal/daemon/controller/handlers/accounts/validate_test.go
@@ -4,7 +4,6 @@ package accounts
import (
- "context"
"fmt"
"strings"
"testing"
@@ -223,7 +222,7 @@ func TestValidateCreateRequest(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
req := &pbs.CreateAccountRequest{Item: tc.item}
- err := validateCreateRequest(context.Background(), req)
+ err := validateCreateRequest(req)
if tc.errContains == "" {
require.NoError(t, err)
return
@@ -286,7 +285,7 @@ func TestValidateUpdateRequest(t *testing.T) {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
- err := validateUpdateRequest(context.Background(), tc.req)
+ err := validateUpdateRequest(tc.req)
if tc.errContains == "" {
require.NoError(t, err)
return
@@ -302,7 +301,7 @@ func TestValidateUpdateRequest(t *testing.T) {
emailClaimField,
nameClaimField,
}
- err := validateUpdateRequest(context.Background(), &pbs.UpdateAccountRequest{
+ err := validateUpdateRequest(&pbs.UpdateAccountRequest{
Id: globals.OidcAccountPrefix + "_1234567890",
UpdateMask: &fieldmaskpb.FieldMask{Paths: readOnlyFields},
})
@@ -320,7 +319,7 @@ func TestValidateUpdateRequest(t *testing.T) {
issuerField,
subjectField,
}
- err := validateUpdateRequest(context.Background(), &pbs.UpdateAccountRequest{
+ err := validateUpdateRequest(&pbs.UpdateAccountRequest{
Id: globals.OidcAccountPrefix + "_1234567890",
UpdateMask: &fieldmaskpb.FieldMask{Paths: readOnlyFields},
})
diff --git a/internal/daemon/controller/handlers/authmethods/authmethod_service.go b/internal/daemon/controller/handlers/authmethods/authmethod_service.go
index 930ae33baab..231b4eea61a 100644
--- a/internal/daemon/controller/handlers/authmethods/authmethod_service.go
+++ b/internal/daemon/controller/handlers/authmethods/authmethod_service.go
@@ -94,13 +94,13 @@ type Service struct {
var _ pbs.AuthMethodServiceServer = (*Service)(nil)
// NewService returns a auth method service which handles auth method related requests to boundary.
-func NewService(ctx context.Context, kms *kms.Kms, pwRepoFn common.PasswordAuthRepoFactory, oidcRepoFn common.OidcAuthRepoFactory, iamRepoFn common.IamRepoFactory, atRepoFn common.AuthTokenRepoFactory, ldapRepoFn common.LdapAuthRepoFactory, opt ...handlers.Option) (Service, error) {
+func NewService(kms *kms.Kms, pwRepoFn common.PasswordAuthRepoFactory, oidcRepoFn common.OidcAuthRepoFactory, iamRepoFn common.IamRepoFactory, atRepoFn common.AuthTokenRepoFactory, ldapRepoFn common.LdapAuthRepoFactory, opt ...handlers.Option) (Service, error) {
const op = "authmethods.NewService"
if kms == nil {
- return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing kms")
+ return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing kms")
}
if pwRepoFn == nil {
- return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing password repository")
+ return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing password repository")
}
if oidcRepoFn == nil {
return Service{}, fmt.Errorf("nil oidc repository provided")
@@ -109,7 +109,7 @@ func NewService(ctx context.Context, kms *kms.Kms, pwRepoFn common.PasswordAuthR
return Service{}, fmt.Errorf("nil ldap repository provided")
}
if iamRepoFn == nil {
- return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing iam repository")
+ return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing iam repository")
}
if atRepoFn == nil {
return Service{}, fmt.Errorf("nil auth token repository provided")
@@ -121,7 +121,7 @@ func NewService(ctx context.Context, kms *kms.Kms, pwRepoFn common.PasswordAuthR
// ListAuthMethods implements the interface pbs.AuthMethodServiceServer.
func (s Service) ListAuthMethods(ctx context.Context, req *pbs.ListAuthMethodsRequest) (*pbs.ListAuthMethodsResponse, error) {
- if err := validateListRequest(ctx, req); err != nil {
+ if err := validateListRequest(req); err != nil {
return nil, err
}
authResults := s.authResult(ctx, req.GetScopeId(), action.List)
@@ -156,7 +156,7 @@ func (s Service) ListAuthMethods(ctx context.Context, req *pbs.ListAuthMethodsRe
return &pbs.ListAuthMethodsResponse{}, nil
}
- filter, err := handlers.NewFilter(ctx, req.GetFilter())
+ filter, err := handlers.NewFilter(req.GetFilter())
if err != nil {
return nil, err
}
@@ -211,7 +211,7 @@ func (s Service) ListAuthMethods(ctx context.Context, req *pbs.ListAuthMethodsRe
func (s Service) GetAuthMethod(ctx context.Context, req *pbs.GetAuthMethodRequest) (*pbs.GetAuthMethodResponse, error) {
const op = "authmethods.(Service).GetAuthMethod"
- if err := validateGetRequest(ctx, req); err != nil {
+ if err := validateGetRequest(req); err != nil {
return nil, err
}
authResults := s.authResult(ctx, req.GetId(), action.Read)
@@ -355,7 +355,7 @@ func (s Service) UpdateAuthMethod(ctx context.Context, req *pbs.UpdateAuthMethod
func (s Service) ChangeState(ctx context.Context, req *pbs.ChangeStateRequest) (*pbs.ChangeStateResponse, error) {
const op = "authmethods.(Service).ChangeState"
- if err := validateChangeStateRequest(ctx, req); err != nil {
+ if err := validateChangeStateRequest(req); err != nil {
return nil, err
}
authResults := s.authResult(ctx, req.GetId(), action.ChangeState)
@@ -403,7 +403,7 @@ func (s Service) ChangeState(ctx context.Context, req *pbs.ChangeStateRequest) (
// DeleteAuthMethod implements the interface pbs.AuthMethodServiceServer.
func (s Service) DeleteAuthMethod(ctx context.Context, req *pbs.DeleteAuthMethodRequest) (*pbs.DeleteAuthMethodResponse, error) {
- if err := validateDeleteRequest(ctx, req); err != nil {
+ if err := validateDeleteRequest(req); err != nil {
return nil, err
}
authResults := s.authResult(ctx, req.GetId(), action.Delete)
@@ -420,7 +420,7 @@ func (s Service) DeleteAuthMethod(ctx context.Context, req *pbs.DeleteAuthMethod
// Authenticate implements the interface pbs.AuthenticationServiceServer.
func (s Service) Authenticate(ctx context.Context, req *pbs.AuthenticateRequest) (*pbs.AuthenticateResponse, error) {
const op = "authmethod_service.(Service).Authenticate"
- if err := validateAuthenticateRequest(ctx, req); err != nil {
+ if err := validateAuthenticateRequest(req); err != nil {
return nil, err
}
@@ -961,10 +961,10 @@ func toAuthTokenProto(t *authtoken.AuthToken) *pba.AuthToken {
// - The path passed in is correctly formatted
// - All required parameters are set
// - There are no conflicting parameters provided
-func validateGetRequest(ctx context.Context, req *pbs.GetAuthMethodRequest) error {
+func validateGetRequest(req *pbs.GetAuthMethodRequest) error {
const op = "authmethod.validateGetRequest"
if req == nil {
- return errors.New(ctx, errors.InvalidParameter, op, "Missing request")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "Missing request")
}
return handlers.ValidateGetRequest(handlers.NoopValidatorFn, req, globals.PasswordAuthMethodPrefix, globals.OidcAuthMethodPrefix, globals.LdapAuthMethodPrefix)
}
@@ -972,7 +972,7 @@ func validateGetRequest(ctx context.Context, req *pbs.GetAuthMethodRequest) erro
func validateCreateRequest(ctx context.Context, req *pbs.CreateAuthMethodRequest) error {
const op = "authmethod.validateCreateRequest"
if req == nil {
- return errors.New(ctx, errors.InvalidParameter, op, "Missing request")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "Missing request")
}
return handlers.ValidateCreateRequest(req.GetItem(), func() map[string]string {
badFields := map[string]string{}
@@ -1077,7 +1077,7 @@ func validateCreateRequest(ctx context.Context, req *pbs.CreateAuthMethodRequest
func validateUpdateRequest(ctx context.Context, req *pbs.UpdateAuthMethodRequest) error {
const op = "authmethod.validateUpdateRequest"
if req == nil {
- return errors.New(ctx, errors.InvalidParameter, op, "missing request")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing request")
}
return handlers.ValidateUpdateRequest(req, req.GetItem(), func() map[string]string {
badFields := map[string]string{}
@@ -1205,25 +1205,25 @@ func validateUpdateRequest(ctx context.Context, req *pbs.UpdateAuthMethodRequest
}, globals.PasswordAuthMethodPrefix, globals.OidcAuthMethodPrefix, globals.LdapAuthMethodPrefix)
}
-func validateDeleteRequest(ctx context.Context, req *pbs.DeleteAuthMethodRequest) error {
+func validateDeleteRequest(req *pbs.DeleteAuthMethodRequest) error {
const op = "authmethod.validateDeleteRequest"
if req == nil {
- return errors.New(ctx, errors.InvalidParameter, op, "Missing request")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "Missing request")
}
return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.PasswordAuthMethodPrefix, globals.OidcAuthMethodPrefix, globals.LdapAuthMethodPrefix)
}
-func validateListRequest(ctx context.Context, req *pbs.ListAuthMethodsRequest) error {
+func validateListRequest(req *pbs.ListAuthMethodsRequest) error {
const op = "authmethod.validateListRequest"
if req == nil {
- return errors.New(ctx, errors.InvalidParameter, op, "Missing request")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "Missing request")
}
badFields := map[string]string{}
if !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Org.Prefix()) && req.GetScopeId() != scope.Global.String() {
badFields[scopeIdField] = "This field must be 'global' or a valid org scope id."
}
- if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil {
+ if _, err := handlers.NewFilter(req.GetFilter()); err != nil {
badFields["filter"] = fmt.Sprintf("This field could not be parsed. %v", err)
}
if len(badFields) > 0 {
@@ -1232,10 +1232,10 @@ func validateListRequest(ctx context.Context, req *pbs.ListAuthMethodsRequest) e
return nil
}
-func validateChangeStateRequest(ctx context.Context, req *pbs.ChangeStateRequest) error {
+func validateChangeStateRequest(req *pbs.ChangeStateRequest) error {
const op = "authmethod.validateChangeStateRequest"
if req == nil {
- return errors.New(ctx, errors.InvalidParameter, op, "Missing request")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "Missing request")
}
if st := subtypes.SubtypeFromId(domain, req.GetId()); st != oidc.Subtype {
return handlers.NotFoundErrorf("This endpoint is only available for the %q Auth Method type.", oidc.Subtype.String())
@@ -1261,10 +1261,10 @@ func validateChangeStateRequest(ctx context.Context, req *pbs.ChangeStateRequest
return nil
}
-func validateAuthenticateRequest(ctx context.Context, req *pbs.AuthenticateRequest) error {
+func validateAuthenticateRequest(req *pbs.AuthenticateRequest) error {
const op = "authmethod.validateAuthenticateRequest"
if req == nil {
- return errors.New(ctx, errors.InvalidParameter, op, "Missing request")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "Missing request")
}
badFields := make(map[string]string)
diff --git a/internal/daemon/controller/handlers/authmethods/authmethod_service_test.go b/internal/daemon/controller/handlers/authmethods/authmethod_service_test.go
index 17a35704723..32fccb9c764 100644
--- a/internal/daemon/controller/handlers/authmethods/authmethod_service_test.go
+++ b/internal/daemon/controller/handlers/authmethods/authmethod_service_test.go
@@ -105,10 +105,10 @@ func TestGet(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kmsCache)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kmsCache)
+ return password.NewRepository(rw, rw, kmsCache)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kmsCache)
+ return authtoken.NewRepository(rw, rw, kmsCache)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
@@ -247,7 +247,7 @@ func TestGet(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
assert, require := assert.New(t), require.New(t)
- s, err := authmethods.NewService(ctx, kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(err, "Couldn't create new auth_method service.")
got, gErr := s.GetAuthMethod(requestauth.DisabledAuthTestContext(iamRepoFn, tc.scopeId), tc.req)
@@ -281,10 +281,10 @@ func TestList(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kmsCache)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kmsCache)
+ return password.NewRepository(rw, rw, kmsCache)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kmsCache)
+ return authtoken.NewRepository(rw, rw, kmsCache)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
@@ -459,7 +459,7 @@ func TestList(t *testing.T) {
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
assert, require := assert.New(t), require.New(t)
- s, err := authmethods.NewService(ctx, kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(err, "Couldn't create new auth_method service.")
// First check with non-anonymous user
@@ -512,10 +512,10 @@ func TestDelete(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kmsCache)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kmsCache)
+ return password.NewRepository(rw, rw, kmsCache)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kmsCache)
+ return authtoken.NewRepository(rw, rw, kmsCache)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
@@ -529,7 +529,7 @@ func TestDelete(t *testing.T) {
ldapAm := ldap.TestAuthMethod(t, conn, databaseWrapper, o.GetPublicId(), []string{"ldaps://ldap1"})
- s, err := authmethods.NewService(ctx, kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(t, err, "Error when getting new auth_method service.")
cases := []struct {
@@ -604,17 +604,17 @@ func TestDelete_twice(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kms)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kms)
+ return password.NewRepository(rw, rw, kms)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
o, _ := iam.TestScopes(t, iamRepo)
am := password.TestAuthMethods(t, conn, o.GetPublicId(), 1)[0]
- s, err := authmethods.NewService(ctx, kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(err, "Error when getting new auth_method service.")
req := &pbs.DeleteAuthMethodRequest{
@@ -637,7 +637,7 @@ func TestCreate(t *testing.T) {
return iam.TestRepo(t, conn, wrapper), nil
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, testKms)
+ return password.NewRepository(rw, rw, testKms)
}
oidcRepoFn := func() (*oidc.Repository, error) {
return oidc.NewRepository(ctx, rw, rw, testKms)
@@ -646,7 +646,7 @@ func TestCreate(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, testKms)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, testKms)
+ return authtoken.NewRepository(rw, rw, testKms)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
@@ -1348,7 +1348,7 @@ func TestCreate(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
assert, require := assert.New(t), require.New(t)
- s, err := authmethods.NewService(ctx, testKms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(testKms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(err, "Error when getting new auth_method service.")
got, gErr := s.CreateAuthMethod(requestauth.DisabledAuthTestContext(iamRepoFn, tc.req.GetItem().GetScopeId()), tc.req)
diff --git a/internal/daemon/controller/handlers/authmethods/ldap.go b/internal/daemon/controller/handlers/authmethods/ldap.go
index c51b8778428..59ef8209254 100644
--- a/internal/daemon/controller/handlers/authmethods/ldap.go
+++ b/internal/daemon/controller/handlers/authmethods/ldap.go
@@ -26,13 +26,7 @@ var ldapMaskManager handlers.MaskManager
func init() {
var err error
- if ldapMaskManager, err = handlers.NewMaskManager(
- context.Background(),
- handlers.MaskDestination{
- &ldapstore.AuthMethod{},
- },
- handlers.MaskSource{&pb.AuthMethod{}, &pb.LdapAuthMethodAttributes{}},
- ); err != nil {
+ if ldapMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&ldapstore.AuthMethod{}}, handlers.MaskSource{&pb.AuthMethod{}, &pb.LdapAuthMethodAttributes{}}); err != nil {
panic(err)
}
diff --git a/internal/daemon/controller/handlers/authmethods/ldap_test.go b/internal/daemon/controller/handlers/authmethods/ldap_test.go
index 8fa1911228e..a89a62bd10e 100644
--- a/internal/daemon/controller/handlers/authmethods/ldap_test.go
+++ b/internal/daemon/controller/handlers/authmethods/ldap_test.go
@@ -55,15 +55,15 @@ func Test_UpdateLdap(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kms)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kms)
+ return password.NewRepository(rw, rw, kms)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
o, _ := iam.TestScopes(t, iamRepo)
- tested, err := authmethods.NewService(ctx, kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ tested, err := authmethods.NewService(kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(t, err, "Error when getting new auth_method service.")
defaultScopeInfo := &scopepb.ScopeInfo{Id: o.GetPublicId(), Type: o.GetType(), ParentScopeId: scope.Global.String()}
@@ -761,10 +761,10 @@ func TestAuthenticate_Ldap(t *testing.T) {
return ldap.NewRepository(testCtx, testRw, testRw, testKms)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(testCtx, testRw, testRw, testKms)
+ return password.NewRepository(testRw, testRw, testKms)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(testCtx, testRw, testRw, testKms)
+ return authtoken.NewRepository(testRw, testRw, testKms)
}
orgDbWrapper, err := testKms.GetWrapper(testCtx, o.GetPublicId(), kms.KeyPurposeDatabase)
@@ -945,7 +945,7 @@ func TestAuthenticate_Ldap(t *testing.T) {
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
assert, require := assert.New(t), require.New(t)
- s, err := authmethods.NewService(testCtx, testKms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(testKms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(err)
resp, err := s.Authenticate(auth.DisabledAuthTestContext(iamRepoFn, o.GetPublicId()), tc.request)
diff --git a/internal/daemon/controller/handlers/authmethods/oidc.go b/internal/daemon/controller/handlers/authmethods/oidc.go
index 784d88e0267..066310a789a 100644
--- a/internal/daemon/controller/handlers/authmethods/oidc.go
+++ b/internal/daemon/controller/handlers/authmethods/oidc.go
@@ -52,7 +52,7 @@ var oidcMaskManager handlers.MaskManager
func init() {
var err error
- if oidcMaskManager, err = handlers.NewMaskManager(context.Background(), handlers.MaskDestination{&oidcstore.AuthMethod{}}, handlers.MaskSource{&pb.AuthMethod{}, &pb.OidcAuthMethodAttributes{}}); err != nil {
+ if oidcMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&oidcstore.AuthMethod{}}, handlers.MaskSource{&pb.AuthMethod{}, &pb.OidcAuthMethodAttributes{}}); err != nil {
panic(err)
}
@@ -398,7 +398,7 @@ func validateAuthenticateOidcRequest(req *pbs.AuthenticateRequest) error {
func toStorageOidcAuthMethod(ctx context.Context, scopeId string, in *pb.AuthMethod) (out *oidc.AuthMethod, dryRun, forced bool, err error) {
const op = "authmethod_service.toStorageOidcAuthMethod"
if in == nil {
- return nil, false, false, errors.New(ctx, errors.InvalidParameter, op, "nil auth method.")
+ return nil, false, false, errors.NewDeprecated(errors.InvalidParameter, op, "nil auth method.")
}
attrs := in.GetOidcAuthMethodsAttributes()
clientId := attrs.GetClientId().GetValue()
@@ -418,14 +418,14 @@ func toStorageOidcAuthMethod(ctx context.Context, scopeId string, in *pb.AuthMet
iss = strings.SplitN(iss, ".well-known/", 2)[0]
issuer, err := url.Parse(iss)
if err != nil {
- return nil, false, false, errors.Wrap(ctx, err, op, errors.WithMsg("cannot parse issuer"), errors.WithCode(errors.InvalidParameter))
+ return nil, false, false, errors.WrapDeprecated(err, op, errors.WithMsg("cannot parse issuer"), errors.WithCode(errors.InvalidParameter))
}
opts = append(opts, oidc.WithIssuer(issuer))
}
if apiUrl := strings.TrimSpace(attrs.GetApiUrlPrefix().GetValue()); apiUrl != "" {
apiU, err := url.Parse(apiUrl)
if err != nil {
- return nil, false, false, errors.Wrap(ctx, err, op, errors.WithMsg("cannot parse api_url_prefix"), errors.WithCode(errors.InvalidParameter))
+ return nil, false, false, errors.WrapDeprecated(err, op, errors.WithMsg("cannot parse api_url_prefix"), errors.WithCode(errors.InvalidParameter))
}
opts = append(opts, oidc.WithApiUrl(apiU))
}
@@ -466,17 +466,17 @@ func toStorageOidcAuthMethod(ctx context.Context, scopeId string, in *pb.AuthMet
for _, v := range attrs.GetAccountClaimMaps() {
acm, err := oidc.ParseAccountClaimMaps(ctx, v)
if err != nil {
- return nil, false, false, errors.Wrap(ctx, err, op)
+ return nil, false, false, errors.WrapDeprecated(err, op)
}
if len(acm) > 1 {
- return nil, false, false, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unable to parse account claim map %s", v))
+ return nil, false, false, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unable to parse account claim map %s", v))
}
var m oidc.ClaimMap
for _, m = range acm {
}
to, err := oidc.ConvertToAccountToClaim(ctx, m.To)
if err != nil {
- return nil, false, false, errors.Wrap(ctx, err, op)
+ return nil, false, false, errors.WrapDeprecated(err, op)
}
claimsMap[m.From] = to
}
diff --git a/internal/daemon/controller/handlers/authmethods/oidc_test.go b/internal/daemon/controller/handlers/authmethods/oidc_test.go
index da4b0e64793..8de3d0931a5 100644
--- a/internal/daemon/controller/handlers/authmethods/oidc_test.go
+++ b/internal/daemon/controller/handlers/authmethods/oidc_test.go
@@ -98,17 +98,17 @@ func getSetup(t *testing.T) setup {
return ldap.NewRepository(ctx, ret.rw, ret.rw, ret.kmsCache)
}
ret.pwRepoFn = func() (*password.Repository, error) {
- return password.NewRepository(ctx, ret.rw, ret.rw, ret.kmsCache)
+ return password.NewRepository(ret.rw, ret.rw, ret.kmsCache)
}
ret.atRepoFn = func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, ret.rw, ret.rw, ret.kmsCache)
+ return authtoken.NewRepository(ret.rw, ret.rw, ret.kmsCache)
}
ret.org, ret.proj = iam.TestScopes(t, ret.iamRepo)
ret.databaseWrapper, err = ret.kmsCache.GetWrapper(ret.ctx, ret.org.PublicId, kms.KeyPurposeDatabase)
require.NoError(err)
- ret.authMethodService, err = authmethods.NewService(ret.ctx, ret.kmsCache, ret.pwRepoFn, ret.oidcRepoFn, ret.iamRepoFn, ret.atRepoFn, ret.ldapRepoFn)
+ ret.authMethodService, err = authmethods.NewService(ret.kmsCache, ret.pwRepoFn, ret.oidcRepoFn, ret.iamRepoFn, ret.atRepoFn, ret.ldapRepoFn)
require.NoError(err)
ret.testProvider = capoidc.StartTestProvider(t)
@@ -158,20 +158,20 @@ func TestList_FilterNonPublic(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kmsCache)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kmsCache)
+ return password.NewRepository(rw, rw, kmsCache)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kmsCache)
+ return authtoken.NewRepository(rw, rw, kmsCache)
}
serversRepoFn := func() (*server.Repository, error) {
- return server.NewRepository(ctx, rw, rw, kmsCache)
+ return server.NewRepository(rw, rw, kmsCache)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
o, _ := iam.TestScopes(t, iamRepo)
- databaseWrapper, err := kmsCache.GetWrapper(ctx, o.GetPublicId(), kms.KeyPurposeDatabase)
+ databaseWrapper, err := kmsCache.GetWrapper(context.Background(), o.GetPublicId(), kms.KeyPurposeDatabase)
require.NoError(t, err)
// 1 Public
@@ -193,7 +193,7 @@ func TestList_FilterNonPublic(t *testing.T) {
oidc.WithIssuer(oidc.TestConvertToUrls(t, fmt.Sprintf("https://alice%d.com", i))[0]), oidc.WithApiUrl(oidc.TestConvertToUrls(t, "https://api.com")[0]))
}
- s, err := authmethods.NewService(ctx, kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(t, err, "Couldn't create new auth_method service.")
req := &pbs.ListAuthMethodsRequest{
@@ -248,7 +248,7 @@ func TestList_FilterNonPublic(t *testing.T) {
}
func TestUpdate_OIDC(t *testing.T) {
- ctx := context.Background()
+ ctx := context.TODO()
conn, _ := db.TestSetup(t, "postgres")
rw := db.New(conn)
wrapper := db.TestWrapper(t)
@@ -263,15 +263,15 @@ func TestUpdate_OIDC(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kms)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kms)
+ return password.NewRepository(rw, rw, kms)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
o, _ := iam.TestScopes(t, iamRepo)
- tested, err := authmethods.NewService(ctx, kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ tested, err := authmethods.NewService(kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(t, err, "Error when getting new auth_method service.")
defaultScopeInfo := &scopepb.ScopeInfo{Id: o.GetPublicId(), Type: o.GetType(), ParentScopeId: scope.Global.String()}
@@ -1074,10 +1074,10 @@ func TestUpdate_OIDCDryRun(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kmsCache)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kmsCache)
+ return password.NewRepository(rw, rw, kmsCache)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kmsCache)
+ return authtoken.NewRepository(rw, rw, kmsCache)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
@@ -1132,7 +1132,7 @@ func TestUpdate_OIDCDryRun(t *testing.T) {
},
}
- tested, err := authmethods.NewService(ctx, kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ tested, err := authmethods.NewService(kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(t, err, "Error when getting new auth_method service.")
cases := []struct {
name string
@@ -1253,10 +1253,10 @@ func TestChangeState_OIDC(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kmsCache)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kmsCache)
+ return password.NewRepository(rw, rw, kmsCache)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kmsCache)
+ return authtoken.NewRepository(rw, rw, kmsCache)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
@@ -1281,7 +1281,7 @@ func TestChangeState_OIDC(t *testing.T) {
mismatchedAM := oidc.TestAuthMethod(t, conn, databaseWrapper, o.PublicId, "inactive", "different_client_id", oidc.ClientSecret(tpClientSecret), oidc.WithIssuer(oidc.TestConvertToUrls(t, tp.Addr())[0]), oidc.WithSigningAlgs(oidc.EdDSA), oidc.WithApiUrl(oidc.TestConvertToUrls(t, "https://example.callback:58")[0]), oidc.WithCertificates(tpCert...))
- s, err := authmethods.NewService(ctx, kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(kmsCache, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(t, err, "Error when getting new auth_method service.")
wantTemplate := &pb.AuthMethod{
@@ -1591,9 +1591,8 @@ func TestAuthenticate_OIDC_Start(t *testing.T) {
}
func TestAuthenticate_OIDC_Token(t *testing.T) {
- ctx := context.Background()
s := getSetup(t)
- testAtRepo, err := authtoken.NewRepository(ctx, s.rw, s.rw, s.kmsCache)
+ testAtRepo, err := authtoken.NewRepository(s.rw, s.rw, s.kmsCache)
require.NoError(t, err)
// a reusable test authmethod for the unit tests
@@ -1650,7 +1649,7 @@ func TestAuthenticate_OIDC_Token(t *testing.T) {
Attrs: &pbs.AuthenticateRequest_OidcAuthMethodAuthenticateTokenRequest{
OidcAuthMethodAuthenticateTokenRequest: &pb.OidcAuthMethodAuthenticateTokenRequest{
TokenId: func() string {
- tokenPublicId, err := authtoken.NewAuthTokenId(ctx)
+ tokenPublicId, err := authtoken.NewAuthTokenId()
require.NoError(t, err)
oidc.TestPendingToken(t, testAtRepo, testUser, testAcct, tokenPublicId)
return oidc.TestTokenRequestId(t, s.authMethod, s.kmsCache, 200*time.Second, tokenPublicId)
@@ -1667,7 +1666,7 @@ func TestAuthenticate_OIDC_Token(t *testing.T) {
Attrs: &pbs.AuthenticateRequest_OidcAuthMethodAuthenticateTokenRequest{
OidcAuthMethodAuthenticateTokenRequest: &pb.OidcAuthMethodAuthenticateTokenRequest{
TokenId: func() string {
- tokenPublicId, err := authtoken.NewAuthTokenId(ctx)
+ tokenPublicId, err := authtoken.NewAuthTokenId()
require.NoError(t, err)
oidc.TestPendingToken(t, testAtRepo, testUser, testAcct, tokenPublicId)
return oidc.TestTokenRequestId(t, s.authMethod, s.kmsCache, -20*time.Second, tokenPublicId)
diff --git a/internal/daemon/controller/handlers/authmethods/password.go b/internal/daemon/controller/handlers/authmethods/password.go
index f781a14d12c..f710a0e86ea 100644
--- a/internal/daemon/controller/handlers/authmethods/password.go
+++ b/internal/daemon/controller/handlers/authmethods/password.go
@@ -31,7 +31,7 @@ var pwMaskManager handlers.MaskManager
func init() {
var err error
- if pwMaskManager, err = handlers.NewMaskManager(context.Background(), handlers.MaskDestination{&pwstore.AuthMethod{}}, handlers.MaskSource{&pb.AuthMethod{}, &pb.PasswordAuthMethodAttributes{}}); err != nil {
+ if pwMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&pwstore.AuthMethod{}}, handlers.MaskSource{&pb.AuthMethod{}, &pb.PasswordAuthMethodAttributes{}}); err != nil {
panic(err)
}
@@ -47,7 +47,7 @@ func init() {
// createPwInRepo creates a password auth method in a repo and returns the result.
// This method should never return a nil AuthMethod without returning an error.
func (s Service) createPwInRepo(ctx context.Context, scopeId string, item *pb.AuthMethod) (*password.AuthMethod, error) {
- u, err := toStoragePwAuthMethod(ctx, scopeId, item)
+ u, err := toStoragePwAuthMethod(scopeId, item)
if err != nil {
return nil, err
}
@@ -63,7 +63,7 @@ func (s Service) createPwInRepo(ctx context.Context, scopeId string, item *pb.Au
}
func (s Service) updatePwInRepo(ctx context.Context, scopeId, id string, mask []string, item *pb.AuthMethod) (*password.AuthMethod, error) {
- u, err := toStoragePwAuthMethod(ctx, scopeId, item)
+ u, err := toStoragePwAuthMethod(scopeId, item)
if err != nil {
return nil, err
}
@@ -174,10 +174,10 @@ func validateAuthenticatePasswordRequest(req *pbs.AuthenticateRequest) error {
return nil
}
-func toStoragePwAuthMethod(ctx context.Context, scopeId string, item *pb.AuthMethod) (*password.AuthMethod, error) {
+func toStoragePwAuthMethod(scopeId string, item *pb.AuthMethod) (*password.AuthMethod, error) {
const op = "authmethod_service.toStoragePwAuthMethod"
if item == nil {
- return nil, errors.New(ctx, errors.InvalidParameter, op, "nil auth method.")
+ return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil auth method.")
}
var opts []password.Option
if item.GetName() != nil {
@@ -186,7 +186,7 @@ func toStoragePwAuthMethod(ctx context.Context, scopeId string, item *pb.AuthMet
if item.GetDescription() != nil {
opts = append(opts, password.WithDescription(item.GetDescription().GetValue()))
}
- u, err := password.NewAuthMethod(ctx, scopeId, opts...)
+ u, err := password.NewAuthMethod(scopeId, opts...)
if err != nil {
return nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build auth method for creation: %v.", err)
}
diff --git a/internal/daemon/controller/handlers/authmethods/password_test.go b/internal/daemon/controller/handlers/authmethods/password_test.go
index 060b704c87e..d8e975d4d93 100644
--- a/internal/daemon/controller/handlers/authmethods/password_test.go
+++ b/internal/daemon/controller/handlers/authmethods/password_test.go
@@ -50,15 +50,15 @@ func TestUpdate_Password(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kms)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kms)
+ return password.NewRepository(rw, rw, kms)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
iamRepo := iam.TestRepo(t, conn, wrapper)
o, _ := iam.TestScopes(t, iamRepo)
- tested, err := authmethods.NewService(ctx, kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ tested, err := authmethods.NewService(kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(t, err, "Error when getting new auth_method service.")
defaultScopeInfo := &scopepb.ScopeInfo{Id: o.GetPublicId(), Type: o.GetType(), ParentScopeId: scope.Global.String()}
@@ -490,16 +490,16 @@ func TestAuthenticate_Password(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kms)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kms)
+ return password.NewRepository(rw, rw, kms)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
am := password.TestAuthMethods(t, conn, o.GetPublicId(), 1)[0]
iam.TestSetPrimaryAuthMethod(t, iam.TestRepo(t, conn, wrapper), o, am.PublicId)
- acct, err := password.NewAccount(ctx, am.GetPublicId(), password.WithLoginName(testLoginName))
+ acct, err := password.NewAccount(am.GetPublicId(), password.WithLoginName(testLoginName))
require.NoError(t, err)
pwRepo, err := pwRepoFn()
@@ -624,7 +624,7 @@ func TestAuthenticate_Password(t *testing.T) {
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
assert, require := assert.New(t), require.New(t)
- s, err := authmethods.NewService(ctx, kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(err)
resp, err := s.Authenticate(auth.DisabledAuthTestContext(iamRepoFn, o.GetPublicId()), tc.request)
@@ -671,14 +671,14 @@ func TestAuthenticate_AuthAccountConnectedToIamUser_Password(t *testing.T) {
return ldap.NewRepository(ctx, rw, rw, kms)
}
pwRepoFn := func() (*password.Repository, error) {
- return password.NewRepository(ctx, rw, rw, kms)
+ return password.NewRepository(rw, rw, kms)
}
atRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
am := password.TestAuthMethods(t, conn, o.GetPublicId(), 1)[0]
- acct, err := password.NewAccount(ctx, am.GetPublicId(), password.WithLoginName(testLoginName))
+ acct, err := password.NewAccount(am.GetPublicId(), password.WithLoginName(testLoginName))
require.NoError(err)
pwRepo, err := pwRepoFn()
@@ -693,7 +693,7 @@ func TestAuthenticate_AuthAccountConnectedToIamUser_Password(t *testing.T) {
iamUser, err := iamRepo.LookupUserWithLogin(context.Background(), acct.GetPublicId())
require.NoError(err)
- s, err := authmethods.NewService(ctx, kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
+ s, err := authmethods.NewService(kms, pwRepoFn, oidcRepoFn, iamRepoFn, atRepoFn, ldapRepoFn)
require.NoError(err)
resp, err := s.Authenticate(auth.DisabledAuthTestContext(iamRepoFn, o.GetPublicId()), &pbs.AuthenticateRequest{
AuthMethodId: am.GetPublicId(),
diff --git a/internal/daemon/controller/handlers/authtokens/authtoken_service.go b/internal/daemon/controller/handlers/authtokens/authtoken_service.go
index 38b226a11ee..410c857d741 100644
--- a/internal/daemon/controller/handlers/authtokens/authtoken_service.go
+++ b/internal/daemon/controller/handlers/authtokens/authtoken_service.go
@@ -53,20 +53,20 @@ type Service struct {
var _ pbs.AuthTokenServiceServer = (*Service)(nil)
// NewService returns a user service which handles user related requests to boundary.
-func NewService(ctx context.Context, repo common.AuthTokenRepoFactory, iamRepoFn common.IamRepoFactory) (Service, error) {
+func NewService(repo common.AuthTokenRepoFactory, iamRepoFn common.IamRepoFactory) (Service, error) {
const op = "authtoken.NewService"
if repo == nil {
- return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing auth token repository")
+ return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing auth token repository")
}
if iamRepoFn == nil {
- return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing iam repository")
+ return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing iam repository")
}
return Service{repoFn: repo, iamRepoFn: iamRepoFn}, nil
}
// ListAuthTokens implements the interface pbs.AuthTokenServiceServer.
func (s Service) ListAuthTokens(ctx context.Context, req *pbs.ListAuthTokensRequest) (*pbs.ListAuthTokensResponse, error) {
- if err := validateListRequest(ctx, req); err != nil {
+ if err := validateListRequest(req); err != nil {
return nil, err
}
authResults := s.authResult(ctx, req.GetScopeId(), action.List)
@@ -101,7 +101,7 @@ func (s Service) ListAuthTokens(ctx context.Context, req *pbs.ListAuthTokensRequ
return &pbs.ListAuthTokensResponse{}, nil
}
- filter, err := handlers.NewFilter(ctx, req.GetFilter())
+ filter, err := handlers.NewFilter(req.GetFilter())
if err != nil {
return nil, err
}
@@ -376,13 +376,13 @@ func validateDeleteRequest(req *pbs.DeleteAuthTokenRequest) error {
return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.AuthTokenPrefix)
}
-func validateListRequest(ctx context.Context, req *pbs.ListAuthTokensRequest) error {
+func validateListRequest(req *pbs.ListAuthTokensRequest) error {
badFields := map[string]string{}
if !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Org.Prefix()) && req.GetScopeId() != scope.Global.String() {
badFields["scope_id"] = "This field must be 'global' or a valid org scope id."
}
- if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil {
+ if _, err := handlers.NewFilter(req.GetFilter()); err != nil {
badFields["filter"] = fmt.Sprintf("This field could not be parsed. %v", err)
%v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/authtokens/authtoken_service_test.go b/internal/daemon/controller/handlers/authtokens/authtoken_service_test.go index e505ad4d77b..2d95a05cf65 100644 --- a/internal/daemon/controller/handlers/authtokens/authtoken_service_test.go +++ b/internal/daemon/controller/handlers/authtokens/authtoken_service_test.go @@ -36,7 +36,6 @@ import ( var testAuthorizedActions = []string{"no-op", "read", "read:self", "delete", "delete:self"} func TestGetSelf(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrap := db.TestWrapper(t) @@ -46,13 +45,13 @@ func TestGetSelf(t *testing.T) { return iam.TestRepo(t, conn, wrap), nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } - a, err := authtokens.NewService(ctx, tokenRepoFn, iamRepoFn) + a, err := authtokens.NewService(tokenRepoFn, iamRepoFn) require.NoError(t, err, "Couldn't create new auth token service.") o, _ := iam.TestScopes(t, iam.TestRepo(t, conn, wrap)) @@ -101,7 +100,7 @@ func TestGetSelf(t *testing.T) { Token: tc.token.GetToken(), } - ctx := auth.NewVerifierContext(ctx, iamRepoFn, tokenRepoFn, serversRepoFn, kms, &requestInfo) + ctx := auth.NewVerifierContext(context.Background(), iamRepoFn, tokenRepoFn, serversRepoFn, kms, &requestInfo) ctx = context.WithValue(ctx, requests.ContextRequestInformationKey, &requests.RequestContext{}) got, err := a.GetAuthToken(ctx, &pbs.GetAuthTokenRequest{Id: tc.readId}) if tc.err != nil { @@ -119,7 +118,6 @@ func TestGetSelf(t *testing.T) { } func TestGet(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrap := db.TestWrapper(t) @@ -128,10 +126,10 @@ func TestGet(t *testing.T) { return iam.TestRepo(t, conn, wrap), nil } repoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } - s, err := authtokens.NewService(ctx, repoFn, iamRepoFn) + s, err := authtokens.NewService(repoFn, iamRepoFn) require.NoError(t, err, "Couldn't create new auth token service.") org, _ := iam.TestScopes(t, iam.TestRepo(t, conn, wrap)) @@ -195,7 +193,6 @@ func TestGet(t *testing.T) { } func TestList_Self(t *testing.T) { - testCtx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrap := db.TestWrapper(t) @@ -207,10 +204,10 @@ func TestList_Self(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(testCtx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(testCtx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } // This will result in the scope having default permissions, which now @@ -236,7 +233,7 @@ func TestList_Self(t *testing.T) { }, } - a, err := authtokens.NewService(testCtx, tokenRepoFn, iamRepoFn) + a, err := authtokens.NewService(tokenRepoFn, iamRepoFn) require.NoError(t, err) for _, tc := range cases { @@ -252,7 +249,7 @@ func TestList_Self(t *testing.T) { Token: tc.requester.GetToken(), } - ctx := auth.NewVerifierContext(testCtx, iamRepoFn, tokenRepoFn, serversRepoFn, kms, &requestInfo) + ctx := 
got, err := a.ListAuthTokens(ctx, &pbs.ListAuthTokensRequest{ScopeId: o.GetPublicId()})
require.NoError(err)
require.Len(got.Items, 1)
@@ -272,7 +269,7 @@ func TestList(t *testing.T) {
return iam.TestRepo(t, conn, wrap), nil
}
repoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(context.Background(), rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
iamRepo := iam.TestRepo(t, conn, wrap)
@@ -390,7 +387,7 @@ func TestList(t *testing.T) {
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
- s, err := authtokens.NewService(context.Background(), repoFn, iamRepoFn)
+ s, err := authtokens.NewService(repoFn, iamRepoFn)
assert, require := assert.New(t), require.New(t)
require.NoError(err, "Couldn't create new user service.")
@@ -420,7 +417,6 @@ func TestList(t *testing.T) {
}
func TestDeleteSelf(t *testing.T) {
- testCtx := context.Background()
conn, _ := db.TestSetup(t, "postgres")
rw := db.New(conn)
wrap := db.TestWrapper(t)
@@ -432,13 +428,13 @@ func TestDeleteSelf(t *testing.T) {
return iamRepo, nil
}
tokenRepoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(testCtx, rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
serversRepoFn := func() (*server.Repository, error) {
- return server.NewRepository(testCtx, rw, rw, kms)
+ return server.NewRepository(rw, rw, kms)
}
- a, err := authtokens.NewService(testCtx, tokenRepoFn, iamRepoFn)
+ a, err := authtokens.NewService(tokenRepoFn, iamRepoFn)
require.NoError(t, err, "Couldn't create new auth token service.")
o, _ := iam.TestScopes(t, iam.TestRepo(t, conn, wrap))
@@ -499,7 +495,7 @@ func TestDeleteSelf(t *testing.T) {
Token: tc.token.GetToken(),
}
- ctx := auth.NewVerifierContext(testCtx, iamRepoFn, tokenRepoFn, serversRepoFn, kms, &requestInfo)
+ ctx := auth.NewVerifierContext(context.Background(), iamRepoFn, tokenRepoFn, serversRepoFn, kms, &requestInfo)
got, err := a.DeleteAuthToken(ctx, &pbs.DeleteAuthTokenRequest{Id: tc.deleteId})
if tc.err != nil {
require.EqualError(err, tc.err.Error())
@@ -512,7 +508,6 @@ func TestDeleteSelf(t *testing.T) {
}
func TestDelete(t *testing.T) {
- ctx := context.Background()
conn, _ := db.TestSetup(t, "postgres")
rw := db.New(conn)
wrap := db.TestWrapper(t)
@@ -521,14 +516,14 @@ func TestDelete(t *testing.T) {
return iam.TestRepo(t, conn, wrap), nil
}
repoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
iamRepo := iam.TestRepo(t, conn, wrap)
org, _ := iam.TestScopes(t, iamRepo)
at := authtoken.TestAuthToken(t, conn, kms, org.GetPublicId())
- s, err := authtokens.NewService(ctx, repoFn, iamRepoFn)
+ s, err := authtokens.NewService(repoFn, iamRepoFn)
require.NoError(t, err, "Error when getting new user service.")
cases := []struct {
@@ -577,7 +572,6 @@ func TestDelete(t *testing.T) {
func TestDelete_twice(t *testing.T) {
assert, require := assert.New(t), require.New(t)
- ctx := context.Background()
conn, _ := db.TestSetup(t, "postgres")
rw := db.New(conn)
wrap := db.TestWrapper(t)
@@ -586,14 +580,14 @@ func TestDelete_twice(t *testing.T) {
return iam.TestRepo(t, conn, wrap), nil
}
repoFn := func() (*authtoken.Repository, error) {
- return authtoken.NewRepository(ctx, rw, rw, kms)
+ return authtoken.NewRepository(rw, rw, kms)
}
iamRepo := iam.TestRepo(t, conn, wrap)
org, _ := iam.TestScopes(t, iamRepo)
at := authtoken.TestAuthToken(t, conn, kms, org.GetPublicId())
- s, err := authtokens.NewService(ctx, repoFn, iamRepoFn)
+ s, err := authtokens.NewService(repoFn, iamRepoFn)
require.NoError(err, "Error when getting new user service")
req := &pbs.DeleteAuthTokenRequest{
Id: at.GetPublicId(),
diff --git a/internal/daemon/controller/handlers/credentiallibraries/credentiallibrary_service.go b/internal/daemon/controller/handlers/credentiallibraries/credentiallibrary_service.go
index 6fa3f9a88f3..2bc899d93ff 100644
--- a/internal/daemon/controller/handlers/credentiallibraries/credentiallibrary_service.go
+++ b/internal/daemon/controller/handlers/credentiallibraries/credentiallibrary_service.go
@@ -89,18 +89,12 @@ var (
func init() {
var err error
- if maskManager, err = handlers.NewMaskManager(
- context.Background(),
- handlers.MaskDestination{&store.CredentialLibrary{}},
- handlers.MaskSource{&pb.CredentialLibrary{}, &pb.VaultCredentialLibraryAttributes{}},
- ); err != nil {
+ if maskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.CredentialLibrary{}},
+ handlers.MaskSource{&pb.CredentialLibrary{}, &pb.VaultCredentialLibraryAttributes{}}); err != nil {
panic(err)
}
- if sshCertMaskManager, err = handlers.NewMaskManager(
- context.Background(),
- handlers.MaskDestination{&store.SSHCertificateCredentialLibrary{}},
- handlers.MaskSource{&pb.CredentialLibrary{}, &pb.VaultSSHCertificateCredentialLibraryAttributes{}},
- ); err != nil {
+ if sshCertMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.SSHCertificateCredentialLibrary{}},
+ handlers.MaskSource{&pb.CredentialLibrary{}, &pb.VaultSSHCertificateCredentialLibraryAttributes{}}); err != nil {
panic(err)
}
}
@@ -116,20 +110,20 @@ type Service struct {
var _ pbs.CredentialLibraryServiceServer = (*Service)(nil)
// NewService returns a credential library service which handles credential library related requests to boundary.
-func NewService(ctx context.Context, repo common.VaultCredentialRepoFactory, iamRepo common.IamRepoFactory) (Service, error) {
+func NewService(repo common.VaultCredentialRepoFactory, iamRepo common.IamRepoFactory) (Service, error) {
const op = "credentiallibraries.NewService"
if iamRepo == nil {
- return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing iam repository")
+ return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing iam repository")
}
if repo == nil {
- return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing vault credential repository")
+ return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing vault credential repository")
}
return Service{iamRepoFn: iamRepo, repoFn: repo}, nil
}
// ListCredentialLibraries implements the interface pbs.CredentialLibraryServiceServer
func (s Service) ListCredentialLibraries(ctx context.Context, req *pbs.ListCredentialLibrariesRequest) (*pbs.ListCredentialLibrariesResponse, error) {
- if err := validateListRequest(ctx, req); err != nil {
+ if err := validateListRequest(req); err != nil {
return nil, err
}
authResults := s.authResult(ctx, req.GetCredentialStoreId(), action.List)
@@ -145,7 +139,7 @@ func (s Service) ListCredentialLibraries(ctx context.Context, req *pbs.ListCrede
return &pbs.ListCredentialLibrariesResponse{}, nil
}
- filter, err := handlers.NewFilter(ctx, req.GetFilter())
+ filter, err := handlers.NewFilter(req.GetFilter())
if err != nil {
return nil, err
}
@@ -172,7 +166,7 @@ func (s Service) ListCredentialLibraries(ctx context.Context, req *pbs.ListCrede
outputOpts = append(outputOpts, handlers.WithAuthorizedActions(authorizedActions))
}
- item, err := toProto(ctx, item, outputOpts...)
+ item, err := toProto(item, outputOpts...)
if err != nil {
return nil, err
}
@@ -218,7 +212,7 @@ func (s Service) GetCredentialLibrary(ctx context.Context, req *pbs.GetCredentia
outputOpts = append(outputOpts, handlers.WithAuthorizedActions(authResults.FetchActionSetForId(ctx, cs.GetPublicId(), IdActions).Strings()))
}
- item, err := toProto(ctx, cs, outputOpts...)
+ item, err := toProto(cs, outputOpts...)
if err != nil {
return nil, err
}
@@ -256,7 +250,7 @@ func (s Service) CreateCredentialLibrary(ctx context.Context, req *pbs.CreateCre
outputOpts = append(outputOpts, handlers.WithAuthorizedActions(authResults.FetchActionSetForId(ctx, cl.GetPublicId(), IdActions).Strings()))
}
- item, err := toProto(ctx, cl, outputOpts...)
+ item, err := toProto(cl, outputOpts...)
if err != nil {
return nil, err
}
@@ -319,7 +313,7 @@ func (s Service) UpdateCredentialLibrary(ctx context.Context, req *pbs.UpdateCre
outputOpts = append(outputOpts, handlers.WithAuthorizedActions(authResults.FetchActionSetForId(ctx, cl.GetPublicId(), IdActions).Strings()))
}
- item, err := toProto(ctx, cl, outputOpts...)
+ item, err := toProto(cl, outputOpts...)
if err != nil {
return nil, err
}
@@ -402,7 +396,7 @@ func (s Service) createInRepo(ctx context.Context, scopeId string, item *pb.Cred
var out credential.Library
switch subtypes.SubtypeFromType(domain, item.GetType()) {
case vault.SSHCertificateLibrarySubtype:
- cl, err := toStorageVaultSSHCertificateLibrary(ctx, item.GetCredentialStoreId(), item)
+ cl, err := toStorageVaultSSHCertificateLibrary(item.GetCredentialStoreId(), item)
if err != nil {
return nil, errors.Wrap(ctx, err, op)
}
@@ -419,7 +413,7 @@ func (s Service) createInRepo(ctx context.Context, scopeId string, item *pb.Cred
}
out = rl
default:
- cl, err := toStorageVaultLibrary(ctx, item.GetCredentialStoreId(), item)
+ cl, err := toStorageVaultLibrary(item.GetCredentialStoreId(), item)
if err != nil {
return nil, errors.Wrap(ctx, err, op)
}
@@ -482,7 +476,7 @@ func (s Service) updateInRepo(
if len(dbMasks) == 0 {
return nil, handlers.InvalidArgumentErrorf("No valid fields included in the update mask.", map[string]string{"update_mask": "No valid fields provided in the update mask."})
}
- cl, err := toStorageVaultSSHCertificateLibrary(ctx, item.GetCredentialStoreId(), item)
+ cl, err := toStorageVaultSSHCertificateLibrary(item.GetCredentialStoreId(), item)
if err != nil {
return nil, errors.Wrap(ctx, err, op)
}
@@ -499,7 +493,7 @@ func (s Service) updateInRepo(
if len(dbMasks) == 0 {
return nil, handlers.InvalidArgumentErrorf("No valid fields included in the update mask.", map[string]string{"update_mask": "No valid fields provided in the update mask."})
}
- cl, err := toStorageVaultLibrary(ctx, item.GetCredentialStoreId(), item)
+ cl, err := toStorageVaultLibrary(item.GetCredentialStoreId(), item)
if err != nil {
return nil, errors.Wrap(ctx, err, op)
}
@@ -610,7 +604,7 @@ func (s Service) authResult(ctx context.Context, id string, a action.Type) auth.
return auth.Verify(ctx, opts...)
}
-func toProto(ctx context.Context, in credential.Library, opt ...handlers.Option) (*pb.CredentialLibrary, error) {
+func toProto(in credential.Library, opt ...handlers.Option) (*pb.CredentialLibrary, error) {
const op = "credentiallibraries.toProto"
opts := handlers.GetOpts(opt...)
@@ -654,7 +648,7 @@ func toProto(ctx context.Context, in credential.Library, opt ...handlers.Option) case vault.GenericLibrarySubtype: vaultIn, ok := in.(*vault.CredentialLibrary) if !ok { - return nil, errors.New(ctx, errors.Internal, op, "unable to cast to vault credential library") + return nil, errors.NewDeprecated(errors.Internal, op, "unable to cast to vault credential library") } if outputFields.Has(globals.CredentialTypeField) && vaultIn.GetCredentialType() != string(credential.UnspecifiedType) { @@ -684,7 +678,7 @@ func toProto(ctx context.Context, in credential.Library, opt ...handlers.Option) if len(m) > 0 { mp, err := structpb.NewStruct(m) if err != nil { - return nil, errors.New(ctx, errors.Internal, op, "creating proto struct for mapping override") + return nil, errors.NewDeprecated(errors.Internal, op, "creating proto struct for mapping override") } out.CredentialMappingOverrides = mp } @@ -707,7 +701,7 @@ func toProto(ctx context.Context, in credential.Library, opt ...handlers.Option) case vault.SSHCertificateLibrarySubtype: vaultIn, ok := in.(*vault.SSHCertificateCredentialLibrary) if !ok { - return nil, errors.New(ctx, errors.Internal, op, "unable to cast to vault ssh certificate credential library") + return nil, errors.NewDeprecated(errors.Internal, op, "unable to cast to vault ssh certificate credential library") } // We don't check for mapping overrides here -- this subtype does not currently support them. out.CredentialType = vaultIn.GetCredentialType() @@ -749,7 +743,7 @@ func toProto(ctx context.Context, in credential.Library, opt ...handlers.Option) return &out, nil } -func toStorageVaultLibrary(ctx context.Context, storeId string, in *pb.CredentialLibrary) (out *vault.CredentialLibrary, err error) { +func toStorageVaultLibrary(storeId string, in *pb.CredentialLibrary) (out *vault.CredentialLibrary, err error) { const op = "credentiallibraries.toStorageVaultLibrary" var opts []vault.Option if in.GetName() != nil { @@ -807,12 +801,12 @@ func toStorageVaultLibrary(ctx context.Context, storeId string, in *pb.Credentia cs, err := vault.NewCredentialLibrary(storeId, attrs.GetPath().GetValue(), opts...) if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to build credential library")) + return nil, errors.WrapDeprecated(err, op, errors.WithMsg("unable to build credential library")) } return cs, err } -func toStorageVaultSSHCertificateLibrary(ctx context.Context, storeId string, in *pb.CredentialLibrary) (out *vault.SSHCertificateCredentialLibrary, err error) { +func toStorageVaultSSHCertificateLibrary(storeId string, in *pb.CredentialLibrary) (out *vault.SSHCertificateCredentialLibrary, err error) { const op = "credentiallibraries.toStorageVaultSSHCertificateLibrary" var opts []vault.Option if in.GetName() != nil { @@ -853,7 +847,7 @@ func toStorageVaultSSHCertificateLibrary(ctx context.Context, storeId string, in cs, err := vault.NewSSHCertificateCredentialLibrary(storeId, attrs.GetPath().GetValue(), attrs.GetUsername().GetValue(), opts...) 
if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to build credential library")) + return nil, errors.WrapDeprecated(err, op, errors.WithMsg("unable to build credential library")) } return cs, err } @@ -1050,12 +1044,12 @@ func validateDeleteRequest(req *pbs.DeleteCredentialLibraryRequest) error { return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.VaultCredentialLibraryPrefix, globals.VaultSshCertificateCredentialLibraryPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListCredentialLibrariesRequest) error { +func validateListRequest(req *pbs.ListCredentialLibrariesRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetCredentialStoreId()), globals.VaultCredentialStorePrefix) { badFields[globals.CredentialStoreIdField] = "This field must be a valid credential store id." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. %v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/credentiallibraries/credentiallibrary_service_test.go b/internal/daemon/controller/handlers/credentiallibraries/credentiallibrary_service_test.go index ef1cba9a495..c95a3851dbe 100644 --- a/internal/daemon/controller/handlers/credentiallibraries/credentiallibrary_service_test.go +++ b/internal/daemon/controller/handlers/credentiallibraries/credentiallibrary_service_test.go @@ -40,7 +40,6 @@ import ( var testAuthorizedActions = []string{"no-op", "read", "update", "delete"} func TestList(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -52,7 +51,7 @@ func TestList(t *testing.T) { return iamRepo, nil } repoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } _, prjNoLibs := iam.TestScopes(t, iamRepo) @@ -138,7 +137,7 @@ func TestList(t *testing.T) { } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { - s, err := NewService(ctx, repoFn, iamRepoFn) + s, err := NewService(repoFn, iamRepoFn) require.NoError(t, err, "Couldn't create new host set service.") // Test non-anonymous listing @@ -172,7 +171,6 @@ func TestList(t *testing.T) { } func TestList_Attributes(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -184,7 +182,7 @@ func TestList_Attributes(t *testing.T) { return iamRepo, nil } repoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } _, prj := iam.TestScopes(t, iamRepo) @@ -252,7 +250,7 @@ func TestList_Attributes(t *testing.T) { } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { - s, err := NewService(ctx, repoFn, iamRepoFn) + s, err := NewService(repoFn, iamRepoFn) require.NoError(t, err, "Couldn't create new host set service.") // Test non-anonymous listing @@ -286,7 +284,6 @@ func TestList_Attributes(t *testing.T) { } func TestCreate(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -298,7 +295,7 @@ func TestCreate(t *testing.T) { return iamRepo, nil } repoFn := func() (*vault.Repository, error) { - return 
vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } _, prj := iam.TestScopes(t, iamRepo) @@ -850,7 +847,7 @@ func TestCreate(t *testing.T) { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := NewService(ctx, repoFn, iamRepoFn) + s, err := NewService(repoFn, iamRepoFn) require.NoError(err, "Error when getting new credential store service.") got, gErr := s.CreateCredentialLibrary(auth.DisabledAuthTestContext(iamRepoFn, prj.GetPublicId()), tc.req) @@ -886,7 +883,6 @@ func TestCreate(t *testing.T) { } func TestGet(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -898,14 +894,14 @@ func TestGet(t *testing.T) { return iamRepo, nil } repoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } _, prj := iam.TestScopes(t, iamRepo) store := vault.TestCredentialStores(t, conn, wrapper, prj.GetPublicId(), 1)[0] unspecifiedLib := vault.TestCredentialLibraries(t, conn, wrapper, store.GetPublicId(), 1)[0] - s, err := NewService(ctx, repoFn, iamRepoFn) + s, err := NewService(repoFn, iamRepoFn) require.NoError(t, err) repo, err := repoFn() @@ -1091,7 +1087,6 @@ func TestGet(t *testing.T) { } func TestDelete(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1103,7 +1098,7 @@ func TestDelete(t *testing.T) { return iamRepo, nil } repoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } _, prj := iam.TestScopes(t, iamRepo) @@ -1111,7 +1106,7 @@ func TestDelete(t *testing.T) { store := vault.TestCredentialStores(t, conn, wrapper, prj.GetPublicId(), 1)[0] vl := vault.TestCredentialLibraries(t, conn, wrapper, store.GetPublicId(), 1)[0] vl2 := vault.TestSSHCertificateCredentialLibraries(t, conn, wrapper, store.GetPublicId(), 1)[0] - s, err := NewService(ctx, repoFn, iamRepoFn) + s, err := NewService(repoFn, iamRepoFn) require.NoError(t, err) cases := []struct { @@ -1157,7 +1152,6 @@ func TestDelete(t *testing.T) { } func TestUpdate(t *testing.T) { - testCtx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1169,13 +1163,13 @@ func TestUpdate(t *testing.T) { return iamRepo, nil } repoFn := func() (*vault.Repository, error) { - return vault.NewRepository(testCtx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } _, prj := iam.TestScopes(t, iamRepo) ctx := auth.DisabledAuthTestContext(iamRepoFn, prj.GetPublicId()) - s, err := NewService(testCtx, repoFn, iamRepoFn) + s, err := NewService(repoFn, iamRepoFn) require.NoError(t, err) cs := vault.TestCredentialStores(t, conn, wrapper, prj.GetPublicId(), 2) store, diffStore := cs[0], cs[1] @@ -1928,7 +1922,6 @@ func TestUpdate(t *testing.T) { } func TestCreate_SSHCertificateCredentialLibrary(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1940,7 +1933,7 @@ func TestCreate_SSHCertificateCredentialLibrary(t *testing.T) { return iamRepo, nil } repoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } _, prj := 
iam.TestScopes(t, iamRepo) @@ -2176,7 +2169,7 @@ func TestCreate_SSHCertificateCredentialLibrary(t *testing.T) { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := NewService(ctx, repoFn, iamRepoFn) + s, err := NewService(repoFn, iamRepoFn) require.NoError(err, "Error when getting new credential store service.") got, gErr := s.CreateCredentialLibrary(auth.DisabledAuthTestContext(iamRepoFn, prj.GetPublicId()), tc.req) @@ -2212,7 +2205,6 @@ func TestCreate_SSHCertificateCredentialLibrary(t *testing.T) { } func TestUpdate_SSHCertificateCredentialLibrary(t *testing.T) { - testCtx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -2224,13 +2216,13 @@ func TestUpdate_SSHCertificateCredentialLibrary(t *testing.T) { return iamRepo, nil } repoFn := func() (*vault.Repository, error) { - return vault.NewRepository(testCtx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } _, prj := iam.TestScopes(t, iamRepo) ctx := auth.DisabledAuthTestContext(iamRepoFn, prj.GetPublicId()) - s, err := NewService(testCtx, repoFn, iamRepoFn) + s, err := NewService(repoFn, iamRepoFn) require.NoError(t, err) cs := vault.TestCredentialStores(t, conn, wrapper, prj.GetPublicId(), 2) store, diffStore := cs[0], cs[1] diff --git a/internal/daemon/controller/handlers/credentials/credential_service.go b/internal/daemon/controller/handlers/credentials/credential_service.go index 58f2359df14..696e337aefb 100644 --- a/internal/daemon/controller/handlers/credentials/credential_service.go +++ b/internal/daemon/controller/handlers/credentials/credential_service.go @@ -65,25 +65,16 @@ var ( func init() { var err error - if upMaskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.UsernamePasswordCredential{}}, - handlers.MaskSource{&pb.Credential{}, &pb.UsernamePasswordAttributes{}}, - ); err != nil { + if upMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.UsernamePasswordCredential{}}, + handlers.MaskSource{&pb.Credential{}, &pb.UsernamePasswordAttributes{}}); err != nil { panic(err) } - if spkMaskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.SshPrivateKeyCredential{}}, - handlers.MaskSource{&pb.Credential{}, &pb.SshPrivateKeyAttributes{}}, - ); err != nil { + if spkMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.SshPrivateKeyCredential{}}, + handlers.MaskSource{&pb.Credential{}, &pb.SshPrivateKeyAttributes{}}); err != nil { panic(err) } - if jsonMaskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.JsonCredential{}}, - handlers.MaskSource{&pb.Credential{}, &pb.JsonAttributes{}}, - ); err != nil { + if jsonMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.JsonCredential{}}, + handlers.MaskSource{&pb.Credential{}, &pb.JsonAttributes{}}); err != nil { panic(err) } } @@ -99,20 +90,20 @@ type Service struct { var _ pbs.CredentialServiceServer = (*Service)(nil) // NewService returns a credential service which handles credential related requests to boundary. 
-func NewService(ctx context.Context, repo common.StaticCredentialRepoFactory, iamRepo common.IamRepoFactory) (Service, error) {
+func NewService(repo common.StaticCredentialRepoFactory, iamRepo common.IamRepoFactory) (Service, error) {
 	const op = "credentials.NewService"
 	if iamRepo == nil {
-		return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing iam repository")
+		return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing iam repository")
 	}
 	if repo == nil {
-		return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing static credential repository")
+		return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing static credential repository")
 	}
 	return Service{iamRepoFn: iamRepo, repoFn: repo}, nil
 }

 // ListCredentials implements the interface pbs.CredentialServiceServer
 func (s Service) ListCredentials(ctx context.Context, req *pbs.ListCredentialsRequest) (*pbs.ListCredentialsResponse, error) {
-	if err := validateListRequest(ctx, req); err != nil {
+	if err := validateListRequest(req); err != nil {
 		return nil, err
 	}
 	authResults := s.authResult(ctx, req.GetCredentialStoreId(), action.List)
@@ -128,7 +119,7 @@ func (s Service) ListCredentials(ctx context.Context, req *pbs.ListCredentialsRe
 		return &pbs.ListCredentialsResponse{}, nil
 	}

-	filter, err := handlers.NewFilter(ctx, req.GetFilter())
+	filter, err := handlers.NewFilter(req.GetFilter())
 	if err != nil {
 		return nil, err
 	}
@@ -884,12 +875,12 @@ func validateDeleteRequest(req *pbs.DeleteCredentialRequest) error {
 	)
 }

-func validateListRequest(ctx context.Context, req *pbs.ListCredentialsRequest) error {
+func validateListRequest(req *pbs.ListCredentialsRequest) error {
 	badFields := map[string]string{}
 	if !handlers.ValidId(handlers.Id(req.GetCredentialStoreId()), globals.StaticCredentialStorePrefix, globals.StaticCredentialStorePreviousPrefix) {
 		badFields[globals.CredentialStoreIdField] = "This field must be a valid credential store id."
 	}
-	if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil {
+	if _, err := handlers.NewFilter(req.GetFilter()); err != nil {
 		badFields["filter"] = fmt.Sprintf("This field could not be parsed.
%v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/credentials/credential_service_test.go b/internal/daemon/controller/handlers/credentials/credential_service_test.go index 0d29a7e3788..e0bddcdb112 100644 --- a/internal/daemon/controller/handlers/credentials/credential_service_test.go +++ b/internal/daemon/controller/handlers/credentials/credential_service_test.go @@ -43,7 +43,6 @@ import ( var testAuthorizedActions = []string{"no-op", "read", "update", "delete"} func TestList(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kkms := kms.TestKms(t, conn, wrapper) @@ -54,14 +53,14 @@ func TestList(t *testing.T) { return iamRepo, nil } staticRepoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kkms) + return static.NewRepository(context.Background(), rw, rw, kkms) } _, prj := iam.TestScopes(t, iamRepo) store := static.TestCredentialStore(t, conn, wrapper, prj.GetPublicId()) storeNoCreds := static.TestCredentialStore(t, conn, wrapper, prj.GetPublicId()) - databaseWrapper, err := kkms.GetWrapper(ctx, prj.GetPublicId(), kms.KeyPurposeDatabase) + databaseWrapper, err := kkms.GetWrapper(context.Background(), prj.GetPublicId(), kms.KeyPurposeDatabase) require.NoError(t, err) var wantCreds []*pb.Credential @@ -69,7 +68,7 @@ func TestList(t *testing.T) { user := fmt.Sprintf("user-%d", i) pass := fmt.Sprintf("pass-%d", i) c := static.TestUsernamePasswordCredential(t, conn, wrapper, user, pass, store.GetPublicId(), prj.GetPublicId()) - hm, err := crypto.HmacSha256(ctx, []byte(pass), databaseWrapper, []byte(store.GetPublicId()), nil, crypto.WithEd25519()) + hm, err := crypto.HmacSha256(context.Background(), []byte(pass), databaseWrapper, []byte(store.GetPublicId()), nil, crypto.WithEd25519()) require.NoError(t, err) wantCreds = append(wantCreds, &pb.Credential{ Id: c.GetPublicId(), @@ -89,7 +88,7 @@ func TestList(t *testing.T) { }) spk := static.TestSshPrivateKeyCredential(t, conn, wrapper, user, static.TestSshPrivateKeyPem, store.GetPublicId(), prj.GetPublicId()) - hm, err = crypto.HmacSha256(ctx, []byte(static.TestSshPrivateKeyPem), databaseWrapper, []byte(store.GetPublicId()), nil) + hm, err = crypto.HmacSha256(context.Background(), []byte(static.TestSshPrivateKeyPem), databaseWrapper, []byte(store.GetPublicId()), nil) require.NoError(t, err) wantCreds = append(wantCreds, &pb.Credential{ Id: spk.GetPublicId(), @@ -112,7 +111,7 @@ func TestList(t *testing.T) { assert.NoError(t, err) credJson := static.TestJsonCredential(t, conn, wrapper, store.GetPublicId(), prj.GetPublicId(), obj) - hm, err = crypto.HmacSha256(ctx, objBytes, databaseWrapper, []byte(store.GetPublicId()), nil) + hm, err = crypto.HmacSha256(context.Background(), objBytes, databaseWrapper, []byte(store.GetPublicId()), nil) require.NoError(t, err) wantCreds = append(wantCreds, &pb.Credential{ Id: credJson.GetPublicId(), @@ -176,7 +175,7 @@ func TestList(t *testing.T) { } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { - s, err := NewService(ctx, staticRepoFn, iamRepoFn) + s, err := NewService(staticRepoFn, iamRepoFn) require.NoError(t, err, "Couldn't create new host set service.") // Test non-anonymous listing @@ -205,7 +204,6 @@ func TestList(t *testing.T) { } func TestGet(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kkms := kms.TestKms(t, conn, wrapper) @@ -224,7 +222,7 @@ func TestGet(t *testing.T) { require.NoError(t, 
err) store := static.TestCredentialStore(t, conn, wrapper, prj.GetPublicId()) - s, err := NewService(ctx, staticRepoFn, iamRepoFn) + s, err := NewService(staticRepoFn, iamRepoFn) require.NoError(t, err) upCred := static.TestUsernamePasswordCredential(t, conn, wrapper, "user", "pass", store.GetPublicId(), prj.GetPublicId()) @@ -402,7 +400,6 @@ func TestGet(t *testing.T) { } func TestDelete(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -422,7 +419,7 @@ func TestDelete(t *testing.T) { _, prj := iam.TestScopes(t, iamRepo) store := static.TestCredentialStore(t, conn, wrapper, prj.GetPublicId()) - s, err := NewService(ctx, staticRepoFn, iamRepoFn) + s, err := NewService(staticRepoFn, iamRepoFn) require.NoError(t, err) upCred := static.TestUsernamePasswordCredential(t, conn, wrapper, "user", "pass", store.GetPublicId(), prj.GetPublicId()) @@ -480,7 +477,6 @@ func TestDelete(t *testing.T) { } func TestCreate(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kkms := kms.TestKms(t, conn, wrapper) @@ -751,7 +747,7 @@ func TestCreate(t *testing.T) { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := NewService(ctx, repoFn, iamRepoFn) + s, err := NewService(repoFn, iamRepoFn) require.NoError(err, "Error when getting new credential store service.") got, gErr := s.CreateCredential(auth.DisabledAuthTestContext(iamRepoFn, prj.GetPublicId()), tc.req) @@ -777,13 +773,13 @@ func TestCreate(t *testing.T) { assert.True(gotUpdateTime.AsTime().After(store.CreateTime.AsTime()), "New credential should have been updated after default credential store. Was updated %v, which is after %v", gotUpdateTime, store.CreateTime.AsTime()) // Calculate hmac - databaseWrapper, err := kkms.GetWrapper(ctx, prj.PublicId, kms.KeyPurposeDatabase) + databaseWrapper, err := kkms.GetWrapper(context.Background(), prj.PublicId, kms.KeyPurposeDatabase) require.NoError(err) switch tc.req.Item.Type { case credential.UsernamePasswordSubtype.String(): password := tc.req.GetItem().GetUsernamePasswordAttributes().GetPassword().GetValue() - hm, err := crypto.HmacSha256(ctx, []byte(password), databaseWrapper, []byte(store.GetPublicId()), nil, crypto.WithEd25519()) + hm, err := crypto.HmacSha256(context.Background(), []byte(password), databaseWrapper, []byte(store.GetPublicId()), nil, crypto.WithEd25519()) require.NoError(err) // Validate attributes equal @@ -794,7 +790,7 @@ func TestCreate(t *testing.T) { case credential.SshPrivateKeySubtype.String(): pk := tc.req.GetItem().GetSshPrivateKeyAttributes().GetPrivateKey().GetValue() - hm, err := crypto.HmacSha256(ctx, []byte(pk), databaseWrapper, []byte(store.GetPublicId()), nil) + hm, err := crypto.HmacSha256(context.Background(), []byte(pk), databaseWrapper, []byte(store.GetPublicId()), nil) require.NoError(err) // Validate attributes equal @@ -804,7 +800,7 @@ func TestCreate(t *testing.T) { assert.Empty(got.GetItem().GetSshPrivateKeyAttributes().GetPrivateKey()) if pass := tc.req.GetItem().GetSshPrivateKeyAttributes().GetPrivateKeyPassphrase().GetValue(); pass != "" { - hm, err := crypto.HmacSha256(ctx, []byte(pass), databaseWrapper, []byte(store.GetPublicId()), nil) + hm, err := crypto.HmacSha256(context.Background(), []byte(pass), databaseWrapper, []byte(store.GetPublicId()), nil) require.NoError(err) assert.Equal(base64.RawURLEncoding.EncodeToString([]byte(hm)), 
got.GetItem().GetSshPrivateKeyAttributes().GetPrivateKeyPassphraseHmac()) @@ -812,7 +808,7 @@ func TestCreate(t *testing.T) { } case credential.JsonSubtype.String(): - hm, err := crypto.HmacSha256(ctx, objBytes, databaseWrapper, []byte(store.GetPublicId()), nil) + hm, err := crypto.HmacSha256(context.Background(), objBytes, databaseWrapper, []byte(store.GetPublicId()), nil) require.NoError(err) // Validate attributes equal @@ -863,7 +859,7 @@ func TestUpdate(t *testing.T) { _, prj := iam.TestScopes(t, iamRepo) ctx := auth.DisabledAuthTestContext(iamRepoFn, prj.GetPublicId()) - s, err := NewService(ctx, staticRepoFn, iamRepoFn) + s, err := NewService(staticRepoFn, iamRepoFn) require.NoError(t, err) fieldmask := func(paths ...string) *fieldmaskpb.FieldMask { diff --git a/internal/daemon/controller/handlers/credentialstores/credentialstore_service.go b/internal/daemon/controller/handlers/credentialstores/credentialstore_service.go index e7322e9ee07..33ba832ec3b 100644 --- a/internal/daemon/controller/handlers/credentialstores/credentialstore_service.go +++ b/internal/daemon/controller/handlers/credentialstores/credentialstore_service.go @@ -79,11 +79,8 @@ func vaultWorkerFilterUnsupported(string) error { func init() { var err error - if maskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.CredentialStore{}, &store.Token{}, &store.ClientCertificate{}}, - handlers.MaskSource{&pb.CredentialStore{}, &pb.VaultCredentialStoreAttributes{}}, - ); err != nil { + if maskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.CredentialStore{}, &store.Token{}, &store.ClientCertificate{}}, + handlers.MaskSource{&pb.CredentialStore{}, &pb.VaultCredentialStoreAttributes{}}); err != nil { panic(err) } } @@ -121,7 +118,7 @@ func NewService( // ListCredentialStores implements the interface pbs.CredentialStoreServiceServer func (s Service) ListCredentialStores(ctx context.Context, req *pbs.ListCredentialStoresRequest) (*pbs.ListCredentialStoresResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetScopeId(), action.List) @@ -156,7 +153,7 @@ func (s Service) ListCredentialStores(ctx context.Context, req *pbs.ListCredenti return &pbs.ListCredentialStoresResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -761,7 +758,7 @@ func toStorageVaultStore(ctx context.Context, scopeId string, in *pb.CredentialS if pemPk != nil { pk = pem.EncodeToMemory(pemPk) } - cc, err := vault.NewClientCertificate(ctx, cert, pk) + cc, err := vault.NewClientCertificate(cert, pk) if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -885,13 +882,13 @@ func validateDeleteRequest(req *pbs.DeleteCredentialStoreRequest) error { return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.VaultCredentialStorePrefix, globals.StaticCredentialStorePrefix, globals.StaticCredentialStorePreviousPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListCredentialStoresRequest) error { +func validateListRequest(req *pbs.ListCredentialStoresRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Project.Prefix()) && !req.GetRecursive() { badFields[globals.ScopeIdField] = "This field must be a valid project scope ID or the list operation must be recursive." 
} - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. %v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/credentialstores/credentialstore_service_test.go b/internal/daemon/controller/handlers/credentialstores/credentialstore_service_test.go index 6b356af2934..b35ad256624 100644 --- a/internal/daemon/controller/handlers/credentialstores/credentialstore_service_test.go +++ b/internal/daemon/controller/handlers/credentialstores/credentialstore_service_test.go @@ -66,7 +66,7 @@ func TestList(t *testing.T) { kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) rw := db.New(conn) - err := vault.RegisterJobs(ctx, sche, rw, rw, kms) + err := vault.RegisterJobs(context.Background(), sche, rw, rw, kms) require.NoError(t, err) iamRepo := iam.TestRepo(t, conn, wrapper) @@ -74,10 +74,10 @@ func TestList(t *testing.T) { return iamRepo, nil } vaultRepoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } staticRepoFn := func() (*credstatic.Repository, error) { - return credstatic.NewRepository(ctx, rw, rw, kms) + return credstatic.NewRepository(context.Background(), rw, rw, kms) } _, prjNoStores := iam.TestScopes(t, iamRepo) @@ -208,7 +208,7 @@ func TestCreateVault(t *testing.T) { kms := kms.TestKms(t, conn, wrapper) sche := scheduler.TestScheduler(t, conn, wrapper) rw := db.New(conn) - err := vault.RegisterJobs(ctx, sche, rw, rw, kms) + err := vault.RegisterJobs(context.Background(), sche, rw, rw, kms) require.NoError(t, err) iamRepo := iam.TestRepo(t, conn, wrapper) @@ -216,10 +216,10 @@ func TestCreateVault(t *testing.T) { return iamRepo, nil } vaultRepoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } staticRepoFn := func() (*credstatic.Repository, error) { - return credstatic.NewRepository(ctx, rw, rw, kms) + return credstatic.NewRepository(context.Background(), rw, rw, kms) } _, prj := iam.TestScopes(t, iamRepo) @@ -597,10 +597,10 @@ func TestCreateStatic(t *testing.T) { return iamRepo, nil } vaultRepoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } staticRepoFn := func() (*credstatic.Repository, error) { - return credstatic.NewRepository(ctx, rw, rw, kms) + return credstatic.NewRepository(context.Background(), rw, rw, kms) } _, prj := iam.TestScopes(t, iamRepo) @@ -763,10 +763,10 @@ func TestGet(t *testing.T) { return iamRepo, nil } vaultRepoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } staticRepoFn := func() (*credstatic.Repository, error) { - return credstatic.NewRepository(ctx, rw, rw, kms) + return credstatic.NewRepository(context.Background(), rw, rw, kms) } _, prj := iam.TestScopes(t, iamRepo) @@ -897,10 +897,10 @@ func TestDelete(t *testing.T) { return iamRepo, nil } vaultRepoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } staticRepoFn := func() (*credstatic.Repository, error) { - return credstatic.NewRepository(ctx, rw, rw, kms) + return credstatic.NewRepository(context.Background(), rw, rw, kms) } _, prj 
:= iam.TestScopes(t, iamRepo)
@@ -958,13 +958,12 @@ func TestDelete(t *testing.T) {
 }

 func TestUpdateVault(t *testing.T) {
-	testCtx := context.Background()
 	conn, _ := db.TestSetup(t, "postgres")
 	wrapper := db.TestWrapper(t)
 	kms := kms.TestKms(t, conn, wrapper)
 	sche := scheduler.TestScheduler(t, conn, wrapper)
 	rw := db.New(conn)
-	err := vault.RegisterJobs(testCtx, sche, rw, rw, kms)
+	err := vault.RegisterJobs(context.Background(), sche, rw, rw, kms)
 	require.NoError(t, err)

 	iamRepo := iam.TestRepo(t, conn, wrapper)
@@ -972,10 +971,10 @@ func TestUpdateVault(t *testing.T) {
 		return iamRepo, nil
 	}
 	vaultRepoFn := func() (*vault.Repository, error) {
-		return vault.NewRepository(testCtx, rw, rw, kms, sche)
+		return vault.NewRepository(rw, rw, kms, sche)
 	}
 	staticRepoFn := func() (*credstatic.Repository, error) {
-		return credstatic.NewRepository(testCtx, rw, rw, kms)
+		return credstatic.NewRepository(context.Background(), rw, rw, kms)
 	}

 	_, prj := iam.TestScopes(t, iamRepo)
@@ -993,12 +992,12 @@ func TestUpdateVault(t *testing.T) {

 	v := vault.NewTestVaultServer(t, vault.WithTestVaultTLS(vault.TestClientTLS), vault.WithClientKey(key))
 	_, token1b := v.CreateToken(t)
-	clientCert, err := vault.NewClientCertificate(ctx, v.ClientCert, v.ClientKey)
+	clientCert, err := vault.NewClientCertificate(v.ClientCert, v.ClientKey)
 	require.NoError(t, err)

 	v2 := vault.NewTestVaultServer(t, vault.WithTestVaultTLS(vault.TestClientTLS), vault.WithClientKey(key))
 	_, token2 := v2.CreateToken(t)
-	clientCert2, err := vault.NewClientCertificate(ctx, v2.ClientCert, v2.ClientKey)
+	clientCert2, err := vault.NewClientCertificate(v2.ClientCert, v2.ClientKey)
 	require.NoError(t, err)

 	freshStore := func() (*vault.CredentialStore, func()) {
@@ -1294,7 +1293,6 @@ func TestUpdateVault(t *testing.T) {
 }

 func TestUpdateStatic(t *testing.T) {
-	testCtx := context.Background()
 	conn, _ := db.TestSetup(t, "postgres")
 	wrapper := db.TestWrapper(t)
 	kms := kms.TestKms(t, conn, wrapper)
@@ -1306,10 +1304,10 @@ func TestUpdateStatic(t *testing.T) {
 		return iamRepo, nil
 	}
 	vaultRepoFn := func() (*vault.Repository, error) {
-		return vault.NewRepository(testCtx, rw, rw, kms, sche)
+		return vault.NewRepository(rw, rw, kms, sche)
 	}
 	staticRepoFn := func() (*credstatic.Repository, error) {
-		return credstatic.NewRepository(testCtx, rw, rw, kms)
+		return credstatic.NewRepository(context.Background(), rw, rw, kms)
 	}

 	_, prj := iam.TestScopes(t, iamRepo)
diff --git a/internal/daemon/controller/handlers/filtering.go b/internal/daemon/controller/handlers/filtering.go
index e02bfa3521b..210516806b1 100644
--- a/internal/daemon/controller/handlers/filtering.go
+++ b/internal/daemon/controller/handlers/filtering.go
@@ -4,8 +4,6 @@
 package handlers

 import (
-	"context"
-
 	"github.com/hashicorp/boundary/internal/errors"
 	"github.com/hashicorp/boundary/internal/filter"
 	"github.com/hashicorp/go-bexpr"
@@ -23,14 +21,14 @@ type Filter struct {

 // NewFilter returns a Filter which can be evaluated against. An empty string parameter indicates
 // all items passed to it should succeed.
-func NewFilter(ctx context.Context, f string) (*Filter, error) {
+func NewFilter(f string) (*Filter, error) {
 	const op = "handlers.NewFilter"
 	if f == "" {
 		return &Filter{}, nil
 	}
 	e, err := bexpr.CreateEvaluator(f, bexpr.WithTagName("json"), bexpr.WithHookFn(filter.WellKnownTypeFilterHook))
 	if err != nil {
-		return nil, errors.Wrap(ctx, err, op, errors.WithMsg("couldn't build filter"), errors.WithCode(errors.InvalidParameter))
+		return nil, errors.WrapDeprecated(err, op, errors.WithMsg("couldn't build filter"), errors.WithCode(errors.InvalidParameter))
 	}
 	return &Filter{eval: e}, nil
 }
diff --git a/internal/daemon/controller/handlers/filtering_test.go b/internal/daemon/controller/handlers/filtering_test.go
index a96e870c787..f94496f710d 100644
--- a/internal/daemon/controller/handlers/filtering_test.go
+++ b/internal/daemon/controller/handlers/filtering_test.go
@@ -4,7 +4,6 @@
 package handlers

 import (
-	"context"
 	"testing"

 	"github.com/stretchr/testify/assert"
@@ -13,7 +12,7 @@ import (
 )

 func TestNewFilter_everythingMatchesEmpty(t *testing.T) {
-	f, err := NewFilter(context.Background(), "")
+	f, err := NewFilter("")
 	require.NoError(t, err)
 	for _, v := range []any{
 		nil,
@@ -33,7 +32,6 @@ func TestNewFilter_everythingMatchesEmpty(t *testing.T) {
 }

 func TestNewFilter(t *testing.T) {
-	ctx := context.Background()
 	type embedded struct {
 		Name string `json:"name"`
 	}
@@ -112,7 +110,7 @@ func TestNewFilter(t *testing.T) {

 	for _, tc := range cases {
 		t.Run(tc.name, func(t *testing.T) {
-			f, err := NewFilter(ctx, tc.filter)
+			f, err := NewFilter(tc.filter)
 			if tc.fErr {
 				require.Error(t, err)
 				return
diff --git a/internal/daemon/controller/handlers/groups/group_service.go b/internal/daemon/controller/handlers/groups/group_service.go
index fa02e06df25..ef7140413b7 100644
--- a/internal/daemon/controller/handlers/groups/group_service.go
+++ b/internal/daemon/controller/handlers/groups/group_service.go
@@ -52,11 +52,7 @@ var (

 func init() {
 	var err error
-	if maskManager, err = handlers.NewMaskManager(
-		context.Background(),
-		handlers.MaskDestination{&store.Group{}},
-		handlers.MaskSource{&pb.Group{}},
-	); err != nil {
+	if maskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.Group{}}, handlers.MaskSource{&pb.Group{}}); err != nil {
 		panic(err)
 	}
 }
@@ -71,17 +67,17 @@ type Service struct {
 var _ pbs.GroupServiceServer = (*Service)(nil)

 // NewService returns a group service which handles group related requests to boundary.
-func NewService(ctx context.Context, repo common.IamRepoFactory) (Service, error) {
+func NewService(repo common.IamRepoFactory) (Service, error) {
 	const op = "groups.NewService"
 	if repo == nil {
-		return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing iam repository")
+		return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing iam repository")
 	}
 	return Service{repoFn: repo}, nil
 }

 // ListGroups implements the interface pbs.GroupServiceServer.
func (s Service) ListGroups(ctx context.Context, req *pbs.ListGroupsRequest) (*pbs.ListGroupsResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetScopeId(), action.List) @@ -116,7 +112,7 @@ func (s Service) ListGroups(ctx context.Context, req *pbs.ListGroupsRequest) (*p return &pbs.ListGroupsResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -423,7 +419,7 @@ func (s Service) createInRepo(ctx context.Context, scopeId string, item *pb.Grou if item.GetDescription() != nil { opts = append(opts, iam.WithDescription(item.GetDescription().GetValue())) } - u, err := iam.NewGroup(ctx, scopeId, opts...) + u, err := iam.NewGroup(scopeId, opts...) if err != nil { return nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build group for creation: %v.", err) } @@ -451,7 +447,7 @@ func (s Service) updateInRepo(ctx context.Context, scopeId, id string, mask []st opts = append(opts, iam.WithName(name.GetValue())) } version := item.GetVersion() - g, err := iam.NewGroup(ctx, scopeId, opts...) + g, err := iam.NewGroup(scopeId, opts...) if err != nil { return nil, nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build group for update: %v.", err) } @@ -685,14 +681,14 @@ func validateDeleteRequest(req *pbs.DeleteGroupRequest) error { return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.GroupPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListGroupsRequest) error { +func validateListRequest(req *pbs.ListGroupsRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Org.Prefix()) && !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Project.Prefix()) && req.GetScopeId() != scope.Global.String() { badFields["scope_id"] = "Incorrectly formatted identifier." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. 
%v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/groups/group_service_test.go b/internal/daemon/controller/handlers/groups/group_service_test.go index fac12835f43..21d3fd181df 100644 --- a/internal/daemon/controller/handlers/groups/group_service_test.go +++ b/internal/daemon/controller/handlers/groups/group_service_test.go @@ -76,7 +76,6 @@ func equalMembers(g *pb.Group, members []string) bool { } func TestGet(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrap := db.TestWrapper(t) iamRepo := iam.TestRepo(t, conn, wrap) @@ -200,7 +199,7 @@ func TestGet(t *testing.T) { req := proto.Clone(toMerge).(*pbs.GetGroupRequest) proto.Merge(req, tc.req) - s, err := groups.NewService(ctx, repoFn) + s, err := groups.NewService(repoFn) require.NoError(err, "Couldn't create new group service.") got, gErr := s.GetGroup(auth.DisabledAuthTestContext(repoFn, tc.scopeId), req) @@ -214,7 +213,6 @@ func TestGet(t *testing.T) { } func TestList(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrap := db.TestWrapper(t) iamRepo := iam.TestRepo(t, conn, wrap) @@ -322,7 +320,7 @@ func TestList(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := groups.NewService(ctx, repoFn) + s, err := groups.NewService(repoFn) require.NoError(err, "Couldn't create new group service.") // Test with a non-anon user @@ -352,7 +350,7 @@ func TestList(t *testing.T) { func TestDelete(t *testing.T) { og, pg, repoFn := createDefaultGroupsAndRepo(t) - s, err := groups.NewService(context.Background(), repoFn) + s, err := groups.NewService(repoFn) require.NoError(t, err, "Error when getting new group service.") cases := []struct { @@ -418,7 +416,7 @@ func TestDelete_twice(t *testing.T) { assert, require := assert.New(t), require.New(t) og, pg, repoFn := createDefaultGroupsAndRepo(t) - s, err := groups.NewService(context.Background(), repoFn) + s, err := groups.NewService(repoFn) require.NoError(err, "Error when getting new group service") scopeId := og.GetScopeId() req := &pbs.DeleteGroupRequest{ @@ -544,7 +542,7 @@ func TestCreate(t *testing.T) { req := proto.Clone(toMerge).(*pbs.CreateGroupRequest) proto.Merge(req, tc.req) - s, err := groups.NewService(context.Background(), repoFn) + s, err := groups.NewService(repoFn) require.NoError(err, "Error when getting new group service.") got, gErr := s.CreateGroup(auth.DisabledAuthTestContext(repoFn, req.GetItem().GetScopeId()), req) @@ -572,7 +570,6 @@ func TestCreate(t *testing.T) { } func TestUpdate(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrap := db.TestWrapper(t) iamRepo := iam.TestRepo(t, conn, wrap) @@ -597,12 +594,12 @@ func TestUpdate(t *testing.T) { require.NoError(t, err, "Couldn't get a new repo") if proj { pgVersion++ - pg, _, _, err = repo.UpdateGroup(ctx, pg, pgVersion, []string{"Name", "Description"}) + pg, _, _, err = repo.UpdateGroup(context.Background(), pg, pgVersion, []string{"Name", "Description"}) require.NoError(t, err, "Failed to reset the group") pgVersion++ } else { ogVersion++ - og, _, _, err = repo.UpdateGroup(ctx, og, ogVersion, []string{"Name", "Description"}) + og, _, _, err = repo.UpdateGroup(context.Background(), og, ogVersion, []string{"Name", "Description"}) require.NoError(t, err, "Failed to reset the group") ogVersion++ } @@ -613,7 +610,7 @@ func TestUpdate(t *testing.T) { Id: og.GetPublicId(), } - tested, err := 
groups.NewService(ctx, repoFn) + tested, err := groups.NewService(repoFn) require.NoError(t, err, "Error creating new service") cases := []struct { name string @@ -985,7 +982,7 @@ func TestAddMember(t *testing.T) { repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - s, err := groups.NewService(context.Background(), repoFn) + s, err := groups.NewService(repoFn) require.NoError(t, err, "Error when getting new group service.") o, p := iam.TestScopes(t, iamRepo) @@ -1120,7 +1117,7 @@ func TestSetMember(t *testing.T) { repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - s, err := groups.NewService(context.Background(), repoFn) + s, err := groups.NewService(repoFn) require.NoError(t, err, "Error when getting new group service.") o, p := iam.TestScopes(t, iamRepo) @@ -1250,7 +1247,7 @@ func TestRemoveMember(t *testing.T) { repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - s, err := groups.NewService(context.Background(), repoFn) + s, err := groups.NewService(repoFn) require.NoError(t, err, "Error when getting new grp service.") o, p := iam.TestScopes(t, iamRepo) diff --git a/internal/daemon/controller/handlers/host_catalogs/host_catalog_service.go b/internal/daemon/controller/handlers/host_catalogs/host_catalog_service.go index 1a534d2bf12..3e2e9cf8b5e 100644 --- a/internal/daemon/controller/handlers/host_catalogs/host_catalog_service.go +++ b/internal/daemon/controller/handlers/host_catalogs/host_catalog_service.go @@ -75,18 +75,10 @@ const domain = "host" func init() { var err error - if staticMaskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.HostCatalog{}}, - handlers.MaskSource{&pb.HostCatalog{}}, - ); err != nil { + if staticMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.HostCatalog{}}, handlers.MaskSource{&pb.HostCatalog{}}); err != nil { panic(err) } - if pluginMaskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&pluginstore.HostCatalog{}}, - handlers.MaskSource{&pb.HostCatalog{}}, - ); err != nil { + if pluginMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&pluginstore.HostCatalog{}}, handlers.MaskSource{&pb.HostCatalog{}}); err != nil { panic(err) } } @@ -104,25 +96,25 @@ var _ pbs.HostCatalogServiceServer = (*Service)(nil) // NewService returns a host catalog Service which handles host catalog related requests to boundary and uses the provided // repositories for storage and retrieval. 
-func NewService(ctx context.Context, repoFn common.StaticRepoFactory, pluginHostRepoFn common.PluginHostRepoFactory, hostPluginRepoFn common.PluginRepoFactory, iamRepoFn common.IamRepoFactory) (Service, error) { +func NewService(repoFn common.StaticRepoFactory, pluginHostRepoFn common.PluginHostRepoFactory, hostPluginRepoFn common.PluginRepoFactory, iamRepoFn common.IamRepoFactory) (Service, error) { const op = "host_catalogs.NewService" if repoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing static repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing static repository") } if pluginHostRepoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing plugin host repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing plugin host repository") } if hostPluginRepoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing host plugin repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing host plugin repository") } if iamRepoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing iam repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing iam repository") } return Service{staticRepoFn: repoFn, pluginHostRepoFn: pluginHostRepoFn, pluginRepoFn: hostPluginRepoFn, iamRepoFn: iamRepoFn}, nil } func (s Service) ListHostCatalogs(ctx context.Context, req *pbs.ListHostCatalogsRequest) (*pbs.ListHostCatalogsResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetScopeId(), action.List) @@ -157,7 +149,7 @@ func (s Service) ListHostCatalogs(ctx context.Context, req *pbs.ListHostCatalogs return &pbs.ListHostCatalogsResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -794,7 +786,7 @@ func toStorageStaticCatalog(ctx context.Context, projectId string, item *pb.Host if desc := item.GetDescription(); desc != nil { opts = append(opts, static.WithDescription(desc.GetValue())) } - hc, err := static.NewHostCatalog(ctx, projectId, opts...) + hc, err := static.NewHostCatalog(projectId, opts...) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to build host catalog")) } @@ -894,13 +886,13 @@ func validateDeleteRequest(req *pbs.DeleteHostCatalogRequest) error { return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.StaticHostCatalogPrefix, globals.PluginHostCatalogPrefix, globals.PluginHostCatalogPreviousPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListHostCatalogsRequest) error { +func validateListRequest(req *pbs.ListHostCatalogsRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Project.Prefix()) && !req.GetRecursive() { badFields[globals.ScopeIdField] = "This field must be a valid project scope ID or the list operation must be recursive." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields[globals.FilterField] = fmt.Sprintf("This field could not be parsed. 
%v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/host_catalogs/host_catalog_service_test.go b/internal/daemon/controller/handlers/host_catalogs/host_catalog_service_test.go index bf12deffe23..7b4dc044a8b 100644 --- a/internal/daemon/controller/handlers/host_catalogs/host_catalog_service_test.go +++ b/internal/daemon/controller/handlers/host_catalogs/host_catalog_service_test.go @@ -88,10 +88,10 @@ func TestGet_Static(t *testing.T) { rw := db.New(conn) repo := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } pluginRepo := func() (*plugin.Repository, error) { return plugin.NewRepository(ctx, rw, rw, kms) @@ -152,7 +152,7 @@ func TestGet_Static(t *testing.T) { req := proto.Clone(toMerge).(*pbs.GetHostCatalogRequest) proto.Merge(req, tc.req) - s, err := host_catalogs.NewService(ctx, repo, pluginHostRepo, pluginRepo, iamRepoFn) + s, err := host_catalogs.NewService(repo, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(err, "Couldn't create a new host catalog service.") got, gErr := s.GetHostCatalog(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), req) @@ -179,10 +179,10 @@ func TestGet_Plugin(t *testing.T) { rw := db.New(conn) repo := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } pluginRepo := func() (*plugin.Repository, error) { return plugin.NewRepository(ctx, rw, rw, kms) @@ -268,7 +268,7 @@ func TestGet_Plugin(t *testing.T) { req := proto.Clone(toMerge).(*pbs.GetHostCatalogRequest) proto.Merge(req, tc.req) - s, err := host_catalogs.NewService(ctx, repo, pluginHostRepo, pluginRepo, iamRepoFn) + s, err := host_catalogs.NewService(repo, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(err, "Couldn't create a new host catalog service.") got, gErr := s.GetHostCatalog(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), req) @@ -296,13 +296,13 @@ func TestList(t *testing.T) { return iam.TestRepo(t, conn, wrapper), nil } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } pluginRepo := func() (*plugin.Repository, error) { return plugin.NewRepository(ctx, rw, rw, kms) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } iamRepo := iam.TestRepo(t, conn, wrapper) @@ -457,7 +457,7 @@ func TestList(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := host_catalogs.NewService(ctx, repoFn, pluginHostRepo, pluginRepo, iamRepoFn) + s, err := host_catalogs.NewService(repoFn, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(err, "Couldn't create new auth_method service.") // Test with 
non-anon user @@ -500,10 +500,10 @@ func TestDelete_Static(t *testing.T) { _, proj := iam.TestScopes(t, iamRepo) rw := db.New(conn) repo := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } pluginRepo := func() (*plugin.Repository, error) { return plugin.NewRepository(ctx, rw, rw, kms) @@ -513,7 +513,7 @@ func TestDelete_Static(t *testing.T) { } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] - s, err := host_catalogs.NewService(ctx, repo, pluginHostRepo, pluginRepo, iamRepoFn) + s, err := host_catalogs.NewService(repo, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(t, err, "Couldn't create a new host catalog service.") cases := []struct { @@ -571,10 +571,10 @@ func TestDelete_Plugin(t *testing.T) { _, proj := iam.TestScopes(t, iamRepo) rw := db.New(conn) repo := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } pluginRepo := func() (*plugin.Repository, error) { return plugin.NewRepository(ctx, rw, rw, kms) @@ -585,7 +585,7 @@ func TestDelete_Plugin(t *testing.T) { plg := plugin.TestPlugin(t, conn, "test") hc := hostplugin.TestCatalog(t, conn, proj.GetPublicId(), plg.GetPublicId()) - s, err := host_catalogs.NewService(ctx, repo, pluginHostRepo, pluginRepo, iamRepoFn) + s, err := host_catalogs.NewService(repo, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(t, err, "Couldn't create a new host catalog service.") cases := []struct { @@ -634,7 +634,6 @@ func TestDelete_Plugin(t *testing.T) { func TestDelete_twice(t *testing.T) { t.Parallel() - testCtx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -644,20 +643,20 @@ func TestDelete_twice(t *testing.T) { _, proj := iam.TestScopes(t, iamRepo) rw := db.New(conn) repo := func() (*static.Repository, error) { - return static.NewRepository(testCtx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(testCtx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } pluginRepo := func() (*plugin.Repository, error) { - return plugin.NewRepository(testCtx, rw, rw, kms) + return plugin.NewRepository(context.Background(), rw, rw, kms) } iamRepoFn := func() (*iam.Repository, error) { return iamRepo, nil } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] - s, err := host_catalogs.NewService(testCtx, repo, pluginHostRepo, pluginRepo, iamRepoFn) + s, err := host_catalogs.NewService(repo, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(err, "Couldn't create a new host catalog service.") req := &pbs.DeleteHostCatalogRequest{ Id: hc.GetPublicId(), @@ -681,10 +680,10 @@ func TestCreate_Static(t *testing.T) { _, proj := iam.TestScopes(t, iamRepo) rw := 
db.New(conn) repo := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } pluginRepo := func() (*plugin.Repository, error) { return plugin.NewRepository(ctx, rw, rw, kms) @@ -791,7 +790,7 @@ func TestCreate_Static(t *testing.T) { req := proto.Clone(toMerge).(*pbs.CreateHostCatalogRequest) proto.Merge(req, tc.req) - s, err := host_catalogs.NewService(ctx, repo, pluginHostRepo, pluginRepo, iamRepoFn) + s, err := host_catalogs.NewService(repo, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(err, "Failed to create a new host catalog service.") got, gErr := s.CreateHostCatalog(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), req) @@ -834,7 +833,7 @@ func TestCreate_Plugin(t *testing.T) { org, proj := iam.TestScopes(t, iamRepo) rw := db.New(conn) repo := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginRepo := func() (*plugin.Repository, error) { return plugin.NewRepository(ctx, rw, rw, kms) @@ -846,7 +845,7 @@ func TestCreate_Plugin(t *testing.T) { name := "test" plg := plugin.TestPlugin(t, conn, name) pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{ + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{ plg.GetPublicId(): loopback.NewWrappingPluginHostClient(&loopback.TestPluginHostServer{ OnCreateCatalogFn: func(ctx context.Context, req *plgpb.OnCreateCatalogRequest) (*plgpb.OnCreateCatalogResponse, error) { return nil, nil @@ -967,7 +966,7 @@ func TestCreate_Plugin(t *testing.T) { req := proto.Clone(toMerge).(*pbs.CreateHostCatalogRequest) proto.Merge(req, tc.req) - s, err := host_catalogs.NewService(ctx, repo, pluginHostRepo, pluginRepo, iamRepoFn) + s, err := host_catalogs.NewService(repo, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(err, "Failed to create a new host catalog service.") got, gErr := s.CreateHostCatalog(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), req) @@ -1012,10 +1011,10 @@ func TestUpdate_Static(t *testing.T) { _, proj := iam.TestScopes(t, iamRepo) rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } pluginRepo := func() (*plugin.Repository, error) { return plugin.NewRepository(ctx, rw, rw, kms) @@ -1023,10 +1022,10 @@ func TestUpdate_Static(t *testing.T) { iamRepoFn := func() (*iam.Repository, error) { return iamRepo, nil } - tested, err := host_catalogs.NewService(ctx, repoFn, pluginHostRepo, pluginRepo, iamRepoFn) + tested, err := host_catalogs.NewService(repoFn, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(t, err, "Failed to create a new host catalog service.") - hc, err := static.NewHostCatalog(ctx, proj.GetPublicId(), static.WithName("default"), static.WithDescription("default")) + hc, err 
:= static.NewHostCatalog(proj.GetPublicId(), static.WithName("default"), static.WithDescription("default")) require.NoError(t, err, "Couldn't get new catalog.") repo, err := repoFn() require.NoError(t, err, "Couldn't create static repostitory") @@ -1372,7 +1371,6 @@ func TestUpdate_Static(t *testing.T) { func TestUpdate_Plugin(t *testing.T) { t.Parallel() - testCtx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1390,18 +1388,18 @@ func TestUpdate_Plugin(t *testing.T) { } repoFn := func() (*static.Repository, error) { - return static.NewRepository(testCtx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(testCtx, rw, rw, kms, sche, plgm) + return hostplugin.NewRepository(rw, rw, kms, sche, plgm) } pluginRepo := func() (*plugin.Repository, error) { - return plugin.NewRepository(testCtx, rw, rw, kms) + return plugin.NewRepository(context.Background(), rw, rw, kms) } iamRepoFn := func() (*iam.Repository, error) { return iamRepo, nil } - tested, err := host_catalogs.NewService(testCtx, repoFn, pluginHostRepo, pluginRepo, iamRepoFn) + tested, err := host_catalogs.NewService(repoFn, pluginHostRepo, pluginRepo, iamRepoFn) require.NoError(t, err, "Failed to create a new host catalog service.") ctx := auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()) diff --git a/internal/daemon/controller/handlers/host_sets/host_set_service.go b/internal/daemon/controller/handlers/host_sets/host_set_service.go index d92d91358b8..fae3f8c84d3 100644 --- a/internal/daemon/controller/handlers/host_sets/host_set_service.go +++ b/internal/daemon/controller/handlers/host_sets/host_set_service.go @@ -69,18 +69,10 @@ const domain = "host" func init() { var err error - if maskManager[static.Subtype], err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&staticstore.HostSet{}, &staticstore.UnimplementedSetFields{}}, - handlers.MaskSource{&pb.HostSet{}}, - ); err != nil { + if maskManager[static.Subtype], err = handlers.NewMaskManager(handlers.MaskDestination{&staticstore.HostSet{}, &staticstore.UnimplementedSetFields{}}, handlers.MaskSource{&pb.HostSet{}}); err != nil { panic(err) } - if maskManager[hostplugin.Subtype], err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&plugstore.HostSet{}}, - handlers.MaskSource{&pb.HostSet{}}, - ); err != nil { + if maskManager[hostplugin.Subtype], err = handlers.NewMaskManager(handlers.MaskDestination{&plugstore.HostSet{}}, handlers.MaskSource{&pb.HostSet{}}); err != nil { panic(err) } } @@ -96,13 +88,13 @@ var _ pbs.HostSetServiceServer = (*Service)(nil) // NewService returns a host set Service which handles host set related requests to boundary and uses the provided // repositories for storage and retrieval. 
-func NewService(ctx context.Context, staticRepoFn common.StaticRepoFactory, pluginRepoFn common.PluginHostRepoFactory) (Service, error) { +func NewService(staticRepoFn common.StaticRepoFactory, pluginRepoFn common.PluginHostRepoFactory) (Service, error) { const op = "host_sets.NewService" if staticRepoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing static repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing static repository") } if pluginRepoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing hostplugin repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing hostplugin repository") } return Service{staticRepoFn: staticRepoFn, pluginRepoFn: pluginRepoFn}, nil } @@ -112,7 +104,7 @@ func (s Service) ListHostSets(ctx context.Context, req *pbs.ListHostSetsRequest) } func (s Service) ListHostSetsWithOptions(ctx context.Context, req *pbs.ListHostSetsRequest, opt ...host.Option) (*pbs.ListHostSetsResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } _, authResults := s.parentAndAuthResult(ctx, req.GetHostCatalogId(), action.List) @@ -127,7 +119,7 @@ func (s Service) ListHostSetsWithOptions(ctx context.Context, req *pbs.ListHostS return &pbs.ListHostSetsResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -915,7 +907,7 @@ func toStorageStaticSet(ctx context.Context, catalogId string, item *pb.HostSet) if item.GetDescription() != nil { opts = append(opts, static.WithDescription(item.GetDescription().GetValue())) } - hs, err := static.NewHostSet(ctx, catalogId, opts...) + hs, err := static.NewHostSet(catalogId, opts...) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("Unable to build host set for creation")) } @@ -1021,12 +1013,12 @@ func validateDeleteRequest(req *pbs.DeleteHostSetRequest) error { return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.StaticHostSetPrefix, globals.PluginHostSetPrefix, globals.PluginHostSetPreviousPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListHostSetsRequest) error { +func validateListRequest(req *pbs.ListHostSetsRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetHostCatalogId()), globals.StaticHostCatalogPrefix, globals.PluginHostCatalogPrefix, globals.PluginHostCatalogPreviousPrefix) { badFields[globals.HostCatalogIdField] = "The field is incorrectly formatted." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields[globals.FilterField] = fmt.Sprintf("This field could not be parsed. 
%v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/host_sets/host_set_service_test.go b/internal/daemon/controller/handlers/host_sets/host_set_service_test.go index 8724b5bd246..5b2d8e9fad9 100644 --- a/internal/daemon/controller/handlers/host_sets/host_set_service_test.go +++ b/internal/daemon/controller/handlers/host_sets/host_set_service_test.go @@ -52,7 +52,6 @@ var testAuthorizedActions = map[subtypes.Subtype][]string{ func TestGet_Static(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -67,10 +66,10 @@ func TestGet_Static(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] hs := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] @@ -128,7 +127,7 @@ func TestGet_Static(t *testing.T) { req := proto.Clone(toMerge).(*pbs.GetHostSetRequest) proto.Merge(req, tc.req) - s, err := host_sets.NewService(ctx, repoFn, pluginRepoFn) + s, err := host_sets.NewService(repoFn, pluginRepoFn) require.NoError(err, "Couldn't create a new host set service.") got, gErr := s.GetHostSet(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), req) @@ -147,7 +146,6 @@ func TestGet_Static(t *testing.T) { func TestGet_Plugin(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -162,7 +160,7 @@ func TestGet_Plugin(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } name := "test" @@ -172,7 +170,7 @@ func TestGet_Plugin(t *testing.T) { plg.GetPublicId(): loopback.NewWrappingPluginHostClient(&loopback.TestPluginServer{}), } pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, plgm) + return hostplugin.NewRepository(rw, rw, kms, sche, plgm) } hc := hostplugin.TestCatalog(t, conn, proj.GetPublicId(), plg.GetPublicId()) @@ -245,7 +243,7 @@ func TestGet_Plugin(t *testing.T) { req := proto.Clone(toMerge).(*pbs.GetHostSetRequest) proto.Merge(req, tc.req) - s, err := host_sets.NewService(ctx, repoFn, pluginRepoFn) + s, err := host_sets.NewService(repoFn, pluginRepoFn) require.NoError(err, "Couldn't create a new host set service.") got, gErr := s.GetHostSet(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), req) @@ -265,7 +263,6 @@ func TestGet_Plugin(t *testing.T) { } func TestList_Static(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -280,10 +277,10 @@ func TestList_Static(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return 
hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } hcs := static.TestCatalogs(t, conn, proj.GetPublicId(), 2) hc, hcNoHosts := hcs[0], hcs[1] @@ -337,7 +334,7 @@ func TestList_Static(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := host_sets.NewService(ctx, repoFn, pluginRepoFn) + s, err := host_sets.NewService(repoFn, pluginRepoFn) require.NoError(err, "Couldn't create new host set service.") // Test with non-anon user @@ -365,7 +362,6 @@ func TestList_Static(t *testing.T) { } func TestList_Plugin(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -380,7 +376,7 @@ func TestList_Plugin(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } name := "test" plg := plugin.TestPlugin(t, conn, name) @@ -388,7 +384,7 @@ func TestList_Plugin(t *testing.T) { plg.GetPublicId(): loopback.NewWrappingPluginHostClient(&loopback.TestPluginServer{}), } pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, plgm) + return hostplugin.NewRepository(rw, rw, kms, sche, plgm) } hc := hostplugin.TestCatalog(t, conn, proj.GetPublicId(), plg.GetPublicId()) hcNoHosts := hostplugin.TestCatalog(t, conn, proj.GetPublicId(), plg.GetPublicId()) @@ -451,7 +447,7 @@ func TestList_Plugin(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := host_sets.NewService(ctx, repoFn, pluginRepoFn) + s, err := host_sets.NewService(repoFn, pluginRepoFn) require.NoError(err, "Couldn't create new host set service.") // Test with non-anon user @@ -480,7 +476,6 @@ func TestList_Plugin(t *testing.T) { func TestDelete_Static(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -495,15 +490,15 @@ func TestDelete_Static(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] h := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] - s, err := host_sets.NewService(ctx, repoFn, pluginRepoFn) + s, err := host_sets.NewService(repoFn, pluginRepoFn) require.NoError(t, err, "Couldn't create a new host set service.") cases := []struct { @@ -552,7 +547,6 @@ func TestDelete_Static(t *testing.T) { func TestDelete_Plugin(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -567,10 +561,10 @@ func TestDelete_Plugin(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, 
map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } name := "test" plg := plugin.TestPlugin(t, conn, name) @@ -581,7 +575,7 @@ func TestDelete_Plugin(t *testing.T) { hc := hostplugin.TestCatalog(t, conn, proj.GetPublicId(), plg.GetPublicId()) h := hostplugin.TestSet(t, conn, kms, sche, hc, plgm) - s, err := host_sets.NewService(ctx, repoFn, pluginRepoFn) + s, err := host_sets.NewService(repoFn, pluginRepoFn) require.NoError(t, err, "Couldn't create a new host set service.") cases := []struct { @@ -630,7 +624,6 @@ func TestDelete_Plugin(t *testing.T) { func TestDelete_twice(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -646,20 +639,20 @@ func TestDelete_twice(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } plgRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] h := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] - s, err := host_sets.NewService(ctx, repoFn, plgRepoFn) + s, err := host_sets.NewService(repoFn, plgRepoFn) require.NoError(err, "Couldn't create a new host set service.") req := &pbs.DeleteHostSetRequest{ Id: h.GetPublicId(), } - ctx = auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()) + ctx := auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()) _, gErr := s.DeleteHostSet(ctx, req) assert.NoError(gErr, "First attempt") _, gErr = s.DeleteHostSet(ctx, req) @@ -669,7 +662,6 @@ func TestDelete_twice(t *testing.T) { func TestCreate_Static(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -684,10 +676,10 @@ func TestCreate_Static(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } plgRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] prefEndpoints := []string{"cidr:1.2.3.4", "cidr:2.3.4.5/24"} @@ -792,7 +784,7 @@ func TestCreate_Static(t *testing.T) { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := host_sets.NewService(ctx, repoFn, plgRepoFn) + s, err := host_sets.NewService(repoFn, plgRepoFn) require.NoError(err, "Failed to create a new host set service.") got, gErr := s.CreateHostSet(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), tc.req) @@ -826,7 +818,6 @@ func TestCreate_Static(t *testing.T) { func TestCreate_Plugin(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -841,12 +832,12 @@ func TestCreate_Plugin(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return 
static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } name := "test" plg := plugin.TestPlugin(t, conn, name) plgRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{ + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{ plg.GetPublicId(): loopback.NewWrappingPluginHostClient(&loopback.TestPluginHostServer{ OnCreateSetFn: func(ctx context.Context, req *plgpb.OnCreateSetRequest) (*plgpb.OnCreateSetResponse, error) { return nil, nil @@ -1058,7 +1049,7 @@ func TestCreate_Plugin(t *testing.T) { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := host_sets.NewService(ctx, repoFn, plgRepoFn) + s, err := host_sets.NewService(repoFn, plgRepoFn) require.NoError(err, "Failed to create a new host set service.") got, gErr := s.CreateHostSet(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), tc.req) @@ -1094,7 +1085,6 @@ func TestCreate_Plugin(t *testing.T) { func TestUpdate_Static(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1109,7 +1099,7 @@ func TestUpdate_Static(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } repo, err := repoFn() require.NoError(t, err, "Couldn't create new static repo.") @@ -1119,9 +1109,9 @@ func TestUpdate_Static(t *testing.T) { h := static.TestHosts(t, conn, hc.GetPublicId(), 2) hIds := []string{h[0].GetPublicId(), h[1].GetPublicId()} - hs, err := static.NewHostSet(ctx, hc.GetPublicId(), static.WithName("default"), static.WithDescription("default")) + hs, err := static.NewHostSet(hc.GetPublicId(), static.WithName("default"), static.WithDescription("default")) require.NoError(t, err) - hs, err = repo.CreateSet(ctx, proj.GetPublicId(), hs) + hs, err = repo.CreateSet(context.Background(), proj.GetPublicId(), hs) require.NoError(t, err) static.TestSetMembers(t, conn, hs.GetPublicId(), h) @@ -1130,7 +1120,7 @@ func TestUpdate_Static(t *testing.T) { resetHostSet := func() { version++ - _, _, _, err = repo.UpdateSet(ctx, proj.GetPublicId(), hs, version, []string{"Name", "Description"}) + _, _, _, err = repo.UpdateSet(context.Background(), proj.GetPublicId(), hs, version, []string{"Name", "Description"}) require.NoError(t, err, "Failed to reset host.") version++ } @@ -1141,9 +1131,9 @@ func TestUpdate_Static(t *testing.T) { } plgRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } - tested, err := host_sets.NewService(ctx, repoFn, plgRepoFn) + tested, err := host_sets.NewService(repoFn, plgRepoFn) require.NoError(t, err, "Failed to create a new host set service.") cases := []struct { @@ -1476,7 +1466,6 @@ func TestUpdate_Static(t *testing.T) { func TestUpdate_Plugin(t *testing.T) { t.Parallel() - testCtx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1492,15 +1481,15 @@ func TestUpdate_Plugin(t *testing.T) { } repoFn := func() (*static.Repository, error) { - return static.NewRepository(testCtx, rw, rw, kms) + return static.NewRepository(rw, 
rw, kms) } pluginHostRepo := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(testCtx, rw, rw, kms, sche, plgm) + return hostplugin.NewRepository(rw, rw, kms, sche, plgm) } iamRepoFn := func() (*iam.Repository, error) { return iamRepo, nil } - tested, err := host_sets.NewService(testCtx, repoFn, pluginHostRepo) + tested, err := host_sets.NewService(repoFn, pluginHostRepo) require.NoError(t, err, "Failed to create a new host catalog service.") hc := hostplugin.TestCatalog(t, conn, proj.GetPublicId(), plg.GetPublicId()) @@ -1815,7 +1804,6 @@ func TestUpdate_Plugin(t *testing.T) { } func TestAddHostSetHosts(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1830,12 +1818,12 @@ func TestAddHostSetHosts(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } plgRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } - s, err := host_sets.NewService(ctx, repoFn, plgRepoFn) + s, err := host_sets.NewService(repoFn, plgRepoFn) require.NoError(t, err, "Error when getting new host set service.") hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] @@ -1937,7 +1925,6 @@ func TestAddHostSetHosts(t *testing.T) { } func TestSetHostSetHosts(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1952,12 +1939,12 @@ func TestSetHostSetHosts(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } plgRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } - s, err := host_sets.NewService(ctx, repoFn, plgRepoFn) + s, err := host_sets.NewService(repoFn, plgRepoFn) require.NoError(t, err, "Error when getting new host set service.") hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] @@ -2055,7 +2042,6 @@ func TestSetHostSetHosts(t *testing.T) { } func TestRemoveHostSetHosts(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -2070,12 +2056,12 @@ func TestRemoveHostSetHosts(t *testing.T) { rw := db.New(conn) repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } plgRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } - s, err := host_sets.NewService(ctx, repoFn, plgRepoFn) + s, err := host_sets.NewService(repoFn, plgRepoFn) require.NoError(t, err, "Error when getting new host set service.") hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] diff --git a/internal/daemon/controller/handlers/hosts/host_service.go 
b/internal/daemon/controller/handlers/hosts/host_service.go index daf664594a1..9750afb814a 100644 --- a/internal/daemon/controller/handlers/hosts/host_service.go +++ b/internal/daemon/controller/handlers/hosts/host_service.go @@ -61,11 +61,7 @@ const domain = "host" func init() { var err error - if maskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.Host{}}, - handlers.MaskSource{&pb.Host{}, &pb.StaticHostAttributes{}}, - ); err != nil { + if maskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.Host{}}, handlers.MaskSource{&pb.Host{}, &pb.StaticHostAttributes{}}); err != nil { panic(err) } } @@ -81,19 +77,19 @@ var _ pbs.HostServiceServer = (*Service)(nil) // NewService returns a host Service which handles host related requests to boundary and uses the provided // repositories for storage and retrieval. -func NewService(ctx context.Context, repoFn common.StaticRepoFactory, pluginRepoFn common.PluginHostRepoFactory) (Service, error) { +func NewService(repoFn common.StaticRepoFactory, pluginRepoFn common.PluginHostRepoFactory) (Service, error) { const op = "hosts.NewService" if repoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing static repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing static repository") } if pluginRepoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing plugin host repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing plugin host repository") } return Service{staticRepoFn: repoFn, pluginRepoFn: pluginRepoFn}, nil } func (s Service) ListHosts(ctx context.Context, req *pbs.ListHostsRequest) (*pbs.ListHostsResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } _, authResults := s.parentAndAuthResult(ctx, req.GetHostCatalogId(), action.List) @@ -108,7 +104,7 @@ func (s Service) ListHosts(ctx context.Context, req *pbs.ListHostsRequest) (*pbs return &pbs.ListHostsResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -345,7 +341,7 @@ func (s Service) createInRepo(ctx context.Context, projectId, catalogId string, if item.GetDescription() != nil { opts = append(opts, static.WithDescription(item.GetDescription().GetValue())) } - h, err := static.NewHost(ctx, catalogId, opts...) + h, err := static.NewHost(catalogId, opts...) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("Unable to build host for creation")) } @@ -377,7 +373,7 @@ func (s Service) updateInRepo(ctx context.Context, projectId, catalogId, id stri if addr := ha.GetAddress(); addr != nil { opts = append(opts, static.WithAddress(addr.GetValue())) } - h, err := static.NewHost(ctx, catalogId, opts...) + h, err := static.NewHost(catalogId, opts...) 
if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("Unable to build host for update")) } @@ -723,12 +719,12 @@ func validateDeleteRequest(req *pbs.DeleteHostRequest) error { }, req, globals.StaticHostPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListHostsRequest) error { +func validateListRequest(req *pbs.ListHostsRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetHostCatalogId()), globals.StaticHostCatalogPrefix, globals.PluginHostCatalogPrefix, globals.PluginHostCatalogPreviousPrefix) { badFields["host_catalog_id"] = "The field is incorrectly formatted." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. %v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/hosts/host_service_test.go b/internal/daemon/controller/handlers/hosts/host_service_test.go index fbf5181b3c3..bc43d095c43 100644 --- a/internal/daemon/controller/handlers/hosts/host_service_test.go +++ b/internal/daemon/controller/handlers/hosts/host_service_test.go @@ -49,7 +49,6 @@ var testAuthorizedActions = map[subtypes.Subtype][]string{ func TestGet_Static(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -64,10 +63,10 @@ func TestGet_Static(t *testing.T) { rw := db.New(conn) sche := scheduler.TestScheduler(t, conn, wrapper) pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] @@ -123,7 +122,7 @@ func TestGet_Static(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := hosts.NewService(ctx, repoFn, pluginRepoFn) + s, err := hosts.NewService(repoFn, pluginRepoFn) require.NoError(err, "Couldn't create a new host service.") got, gErr := s.GetHost(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), tc.req) @@ -142,7 +141,6 @@ func TestGet_Static(t *testing.T) { func TestGet_Plugin(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -162,10 +160,10 @@ func TestGet_Plugin(t *testing.T) { rw := db.New(conn) sche := scheduler.TestScheduler(t, conn, wrapper) pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, plgm) + return hostplugin.NewRepository(rw, rw, kms, sche, plgm) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } hc := hostplugin.TestCatalog(t, conn, proj.GetPublicId(), plg.GetPublicId()) h := hostplugin.TestHost(t, conn, hc.GetPublicId(), "test", hostplugin.WithExternalName("test-ext-name")) @@ -240,7 +238,7 @@ func TestGet_Plugin(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), 
require.New(t) - s, err := hosts.NewService(ctx, repoFn, pluginRepoFn) + s, err := hosts.NewService(repoFn, pluginRepoFn) require.NoError(err, "Couldn't create a new host service.") got, gErr := s.GetHost(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), tc.req) @@ -260,7 +258,6 @@ func TestGet_Plugin(t *testing.T) { } func TestList_Static(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -275,10 +272,10 @@ func TestList_Static(t *testing.T) { rw := db.New(conn) sche := scheduler.TestScheduler(t, conn, wrapper) pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } hcs := static.TestCatalogs(t, conn, proj.GetPublicId(), 2) hc, hcNoHosts := hcs[0], hcs[1] @@ -349,7 +346,7 @@ func TestList_Static(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := hosts.NewService(ctx, repoFn, pluginRepoFn) + s, err := hosts.NewService(repoFn, pluginRepoFn) require.NoError(err, "Couldn't create new host set service.") // Test non-anonymous listing @@ -376,7 +373,6 @@ func TestList_Static(t *testing.T) { } func TestList_Plugin(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -395,10 +391,10 @@ func TestList_Plugin(t *testing.T) { rw := db.New(conn) sche := scheduler.TestScheduler(t, conn, wrapper) pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, plgm) + return hostplugin.NewRepository(rw, rw, kms, sche, plgm) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } hcs := hostplugin.TestCatalogs(t, conn, proj.GetPublicId(), plg.GetPublicId(), 2) hc, hcNoHosts := hcs[0], hcs[1] @@ -472,7 +468,7 @@ func TestList_Plugin(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := hosts.NewService(ctx, repoFn, pluginRepoFn) + s, err := hosts.NewService(repoFn, pluginRepoFn) require.NoError(err, "Couldn't create new host set service.") // Test non-anonymous listing @@ -504,7 +500,6 @@ func TestList_Plugin(t *testing.T) { func TestDelete(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -519,10 +514,10 @@ func TestDelete(t *testing.T) { rw := db.New(conn) sche := scheduler.TestScheduler(t, conn, wrapper) pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] @@ -531,7 +526,7 @@ func 
TestDelete(t *testing.T) { pluginHc := hostplugin.TestCatalog(t, conn, proj.GetPublicId(), plg.GetPublicId()) pluginH := hostplugin.TestHost(t, conn, pluginHc.GetPublicId(), "test") - s, err := hosts.NewService(ctx, repoFn, pluginRepoFn) + s, err := hosts.NewService(repoFn, pluginRepoFn) require.NoError(t, err, "Couldn't create a new host set service.") cases := []struct { @@ -588,7 +583,6 @@ func TestDelete(t *testing.T) { func TestDelete_twice(t *testing.T) { t.Parallel() - testCtx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -604,15 +598,15 @@ func TestDelete_twice(t *testing.T) { rw := db.New(conn) sche := scheduler.TestScheduler(t, conn, wrapper) pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(testCtx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(testCtx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] h := static.TestHosts(t, conn, hc.GetPublicId(), 1)[0] - s, err := hosts.NewService(testCtx, repoFn, pluginRepoFn) + s, err := hosts.NewService(repoFn, pluginRepoFn) require.NoError(err, "Couldn't create a new host set service.") req := &pbs.DeleteHostRequest{ Id: h.GetPublicId(), @@ -627,7 +621,6 @@ func TestDelete_twice(t *testing.T) { func TestCreate(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -642,10 +635,10 @@ func TestCreate(t *testing.T) { rw := db.New(conn) sche := scheduler.TestScheduler(t, conn, wrapper) pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] @@ -824,7 +817,7 @@ func TestCreate(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := hosts.NewService(ctx, repoFn, pluginRepoFn) + s, err := hosts.NewService(repoFn, pluginRepoFn) require.NoError(err, "Failed to create a new host set service.") got, gErr := s.CreateHost(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), tc.req) @@ -864,7 +857,6 @@ func TestCreate(t *testing.T) { func TestUpdate_Static(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -879,10 +871,10 @@ func TestUpdate_Static(t *testing.T) { rw := db.New(conn) sche := scheduler.TestScheduler(t, conn, wrapper) pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } repo, err := repoFn() 
require.NoError(t, err, "Couldn't create new static repo.") @@ -890,9 +882,9 @@ func TestUpdate_Static(t *testing.T) { hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] s := static.TestSets(t, conn, hc.GetPublicId(), 1)[0] - h, err := static.NewHost(ctx, hc.GetPublicId(), static.WithName("default"), static.WithDescription("default"), static.WithAddress("defaultaddress")) + h, err := static.NewHost(hc.GetPublicId(), static.WithName("default"), static.WithDescription("default"), static.WithAddress("defaultaddress")) require.NoError(t, err) - h, err = repo.CreateHost(ctx, proj.GetPublicId(), h) + h, err = repo.CreateHost(context.Background(), proj.GetPublicId(), h) require.NoError(t, err) static.TestSetMembers(t, conn, s.GetPublicId(), []*static.Host{h}) @@ -900,14 +892,14 @@ func TestUpdate_Static(t *testing.T) { resetHost := func() { version++ - _, _, err = repo.UpdateHost(ctx, proj.GetPublicId(), h, version, []string{"Name", "Description", "Address"}) + _, _, err = repo.UpdateHost(context.Background(), proj.GetPublicId(), h, version, []string{"Name", "Description", "Address"}) require.NoError(t, err, "Failed to reset host.") version++ } hCreated := h.GetCreateTime().GetTimestamp().AsTime() - tested, err := hosts.NewService(ctx, repoFn, pluginRepoFn) + tested, err := hosts.NewService(repoFn, pluginRepoFn) require.NoError(t, err, "Failed to create a new host set service.") cases := []struct { @@ -1332,7 +1324,6 @@ func TestUpdate_Static(t *testing.T) { func TestUpdate_Plugin(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1347,17 +1338,17 @@ func TestUpdate_Plugin(t *testing.T) { rw := db.New(conn) sche := scheduler.TestScheduler(t, conn, wrapper) pluginRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } repoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } plg := plugin.TestPlugin(t, conn, "test") hc := hostplugin.TestCatalog(t, conn, proj.GetPublicId(), plg.GetPublicId()) h := hostplugin.TestHost(t, conn, hc.GetPublicId(), "test") - tested, err := hosts.NewService(ctx, repoFn, pluginRepoFn) + tested, err := hosts.NewService(repoFn, pluginRepoFn) require.NoError(t, err) got, err := tested.UpdateHost(auth.DisabledAuthTestContext(iamRepoFn, proj.GetPublicId()), &pbs.UpdateHostRequest{ diff --git a/internal/daemon/controller/handlers/managed_groups/managed_group_service.go b/internal/daemon/controller/handlers/managed_groups/managed_group_service.go index c6765f4ca5d..cf9f0b7ea2a 100644 --- a/internal/daemon/controller/handlers/managed_groups/managed_group_service.go +++ b/internal/daemon/controller/handlers/managed_groups/managed_group_service.go @@ -69,18 +69,10 @@ var ( func init() { var err error - if oidcMaskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&oidcstore.ManagedGroup{}}, - handlers.MaskSource{&pb.ManagedGroup{}, &pb.OidcManagedGroupAttributes{}}, - ); err != nil { + if oidcMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&oidcstore.ManagedGroup{}}, handlers.MaskSource{&pb.ManagedGroup{}, &pb.OidcManagedGroupAttributes{}}); err != nil { panic(err) } - if ldapMaskManager, err = handlers.NewMaskManager( - 
context.Background(), - handlers.MaskDestination{&ldapstore.ManagedGroup{}}, - handlers.MaskSource{&pb.ManagedGroup{}, &pb.LdapManagedGroupAttributes{}}, - ); err != nil { + if ldapMaskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&ldapstore.ManagedGroup{}}, handlers.MaskSource{&pb.ManagedGroup{}, &pb.LdapManagedGroupAttributes{}}); err != nil { panic(err) } } @@ -109,7 +101,7 @@ func NewService(ctx context.Context, oidcRepo common.OidcAuthRepoFactory, ldapRe // ListManagedGroups implements the interface pbs.ManagedGroupsServiceServer. func (s Service) ListManagedGroups(ctx context.Context, req *pbs.ListManagedGroupsRequest) (*pbs.ListManagedGroupsResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } _, authResults := s.parentAndAuthResult(ctx, req.GetAuthMethodId(), action.List) @@ -124,7 +116,7 @@ func (s Service) ListManagedGroups(ctx context.Context, req *pbs.ListManagedGrou return &pbs.ListManagedGroupsResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -174,7 +166,7 @@ func (s Service) ListManagedGroups(ctx context.Context, req *pbs.ListManagedGrou func (s Service) GetManagedGroup(ctx context.Context, req *pbs.GetManagedGroupRequest) (*pbs.GetManagedGroupResponse, error) { const op = "managed_groups.(Service).GetManagedGroup" - if err := validateGetRequest(ctx, req); err != nil { + if err := validateGetRequest(req); err != nil { return nil, err } @@ -216,7 +208,7 @@ func (s Service) GetManagedGroup(ctx context.Context, req *pbs.GetManagedGroupRe func (s Service) CreateManagedGroup(ctx context.Context, req *pbs.CreateManagedGroupRequest) (*pbs.CreateManagedGroupResponse, error) { const op = "managed_groups.(Service).CreateManagedGroup" - if err := validateCreateRequest(ctx, req); err != nil { + if err := validateCreateRequest(req); err != nil { return nil, err } @@ -255,7 +247,7 @@ func (s Service) CreateManagedGroup(ctx context.Context, req *pbs.CreateManagedG func (s Service) UpdateManagedGroup(ctx context.Context, req *pbs.UpdateManagedGroupRequest) (*pbs.UpdateManagedGroupResponse, error) { const op = "managed_groups.(Service).UpdateManagedGroup" - if err := validateUpdateRequest(ctx, req); err != nil { + if err := validateUpdateRequest(req); err != nil { return nil, err } @@ -292,7 +284,7 @@ func (s Service) UpdateManagedGroup(ctx context.Context, req *pbs.UpdateManagedG // DeleteManagedGroup implements the interface pbs.ManagedGroupServiceServer. 
func (s Service) DeleteManagedGroup(ctx context.Context, req *pbs.DeleteManagedGroupRequest) (*pbs.DeleteManagedGroupResponse, error) { - if err := validateDeleteRequest(ctx, req); err != nil { + if err := validateDeleteRequest(req); err != nil { return nil, err } _, authResults := s.parentAndAuthResult(ctx, req.GetId(), action.Delete) @@ -781,18 +773,18 @@ func toProto(ctx context.Context, in auth.ManagedGroup, opt ...handlers.Option) // - The path passed in is correctly formatted // - All required parameters are set // - There are no conflicting parameters provided -func validateGetRequest(ctx context.Context, req *pbs.GetManagedGroupRequest) error { +func validateGetRequest(req *pbs.GetManagedGroupRequest) error { const op = "managed_groups.validateGetRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } return handlers.ValidateGetRequest(handlers.NoopValidatorFn, req, globals.OidcManagedGroupPrefix, globals.LdapManagedGroupPrefix) } -func validateCreateRequest(ctx context.Context, req *pbs.CreateManagedGroupRequest) error { +func validateCreateRequest(req *pbs.CreateManagedGroupRequest) error { const op = "managed_groups.validateCreateRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } return handlers.ValidateCreateRequest(req.GetItem(), func() map[string]string { badFields := map[string]string{} @@ -835,10 +827,10 @@ func validateCreateRequest(ctx context.Context, req *pbs.CreateManagedGroupReque }) } -func validateUpdateRequest(ctx context.Context, req *pbs.UpdateManagedGroupRequest) error { +func validateUpdateRequest(req *pbs.UpdateManagedGroupRequest) error { const op = "managed_groups.validateUpdateRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } return handlers.ValidateUpdateRequest(req, req.GetItem(), func() map[string]string { badFields := map[string]string{} @@ -879,24 +871,24 @@ func validateUpdateRequest(ctx context.Context, req *pbs.UpdateManagedGroupReque }, globals.OidcManagedGroupPrefix, globals.LdapManagedGroupPrefix) } -func validateDeleteRequest(ctx context.Context, req *pbs.DeleteManagedGroupRequest) error { +func validateDeleteRequest(req *pbs.DeleteManagedGroupRequest) error { const op = "managed_groups.validateDeleteRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.OidcManagedGroupPrefix, globals.LdapManagedGroupPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListManagedGroupsRequest) error { +func validateListRequest(req *pbs.ListManagedGroupsRequest) error { const op = "managed_groups.validateListRequest" if req == nil { - return errors.New(ctx, errors.InvalidParameter, op, "nil request") + return errors.NewDeprecated(errors.InvalidParameter, op, "nil request") } badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetAuthMethodId()), globals.OidcAuthMethodPrefix, globals.LdapAuthMethodPrefix) { badFields[globals.AuthMethodIdField] = "Invalid formatted identifier." 
} - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields[globals.FilterField] = fmt.Sprintf("This field could not be parsed. %v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/managed_groups/managed_group_service_test.go b/internal/daemon/controller/handlers/managed_groups/managed_group_service_test.go index 393b59871d1..6906f3379f8 100644 --- a/internal/daemon/controller/handlers/managed_groups/managed_group_service_test.go +++ b/internal/daemon/controller/handlers/managed_groups/managed_group_service_test.go @@ -114,7 +114,7 @@ func TestGet(t *testing.T) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -267,7 +267,7 @@ func TestListOidc(t *testing.T) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -422,7 +422,7 @@ func TestListLdap(t *testing.T) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -580,7 +580,7 @@ func TestDelete(t *testing.T) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -677,7 +677,7 @@ func TestDelete_twice(t *testing.T) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -719,7 +719,7 @@ func TestCreateOidc(t *testing.T) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -924,7 +924,7 @@ func TestCreateLdap(t *testing.T) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -1130,7 +1130,7 @@ func TestUpdateOidc(t *testing.T) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() (*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) @@ -1518,7 +1518,7 @@ func TestUpdateLdap(t *testing.T) { return oidc.NewRepository(ctx, rw, rw, kmsCache) } iamRepoFn := func() 
(*iam.Repository, error) { - return iam.NewRepository(ctx, rw, rw, kmsCache) + return iam.NewRepository(rw, rw, kmsCache) } ldapRepoFn := func() (*ldap.Repository, error) { return ldap.NewRepository(ctx, rw, rw, kmsCache) diff --git a/internal/daemon/controller/handlers/managed_groups/validate_test.go b/internal/daemon/controller/handlers/managed_groups/validate_test.go index 06f1392d23d..8280826098d 100644 --- a/internal/daemon/controller/handlers/managed_groups/validate_test.go +++ b/internal/daemon/controller/handlers/managed_groups/validate_test.go @@ -4,7 +4,6 @@ package managed_groups import ( - "context" "fmt" "strings" "testing" @@ -128,7 +127,7 @@ func TestValidateCreateRequest(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() req := &pbs.CreateManagedGroupRequest{Item: tc.item} - err := validateCreateRequest(context.Background(), req) + err := validateCreateRequest(req) if tc.errContains == "" { require.NoError(t, err) return @@ -201,7 +200,7 @@ func TestValidateUpdateRequest(t *testing.T) { tc := tc // capture range variable t.Run(tc.name, func(t *testing.T) { t.Parallel() - err := validateUpdateRequest(context.Background(), tc.req) + err := validateUpdateRequest(tc.req) if tc.errContains == "" { require.NoError(t, err) return diff --git a/internal/daemon/controller/handlers/mask_manager.go b/internal/daemon/controller/handlers/mask_manager.go index 17c45e0da5a..36be77d7007 100644 --- a/internal/daemon/controller/handlers/mask_manager.go +++ b/internal/daemon/controller/handlers/mask_manager.go @@ -4,7 +4,6 @@ package handlers import ( - "context" "fmt" "strings" @@ -27,13 +26,13 @@ type ( // the first proto from all subsequent protos assuming they are both using the // mask_mapping custom option. Error is returned if no mappings are // found or if one of the passed protos has a mapping that doesn't reciprocate. -func NewMaskManager(ctx context.Context, dest MaskDestination, src MaskSource) (MaskManager, error) { +func NewMaskManager(dest MaskDestination, src MaskSource) (MaskManager, error) { const op = "handlers.NewMaskManager" - srcToDest, err := mapFromProto(ctx, src) + srcToDest, err := mapFromProto(src) if err != nil { return nil, err } - destToSrc, err := mapFromProto(ctx, dest) + destToSrc, err := mapFromProto(dest) if err != nil { return nil, err } @@ -42,7 +41,7 @@ func NewMaskManager(ctx context.Context, dest MaskDestination, src MaskSource) ( for k, v := range srcToDest { ov, ok := destToSrc[v] if !ok || ov != k { - return nil, errors.New(ctx, errors.Encode, op, fmt.Sprintf("mapping src field %q maps to %q, dest %q maps to %q", k, v, v, ov)) + return nil, errors.NewDeprecated(errors.Encode, op, fmt.Sprintf("mapping src field %q maps to %q, dest %q maps to %q", k, v, v, ov)) } result[k] = v } @@ -50,18 +49,18 @@ func NewMaskManager(ctx context.Context, dest MaskDestination, src MaskSource) ( // Now check to make sure there aren't any dangling dest mappings. 
for k, v := range destToSrc { if ov, ok := srcToDest[v]; !ok || ov != k { - return nil, errors.New(ctx, errors.Encode, op, fmt.Sprintf("mapping src field %q maps to %q, dest %q maps to %q", k, v, v, ov)) + return nil, errors.NewDeprecated(errors.Encode, op, fmt.Sprintf("mapping src field %q maps to %q, dest %q maps to %q", k, v, v, ov)) } } if len(result) == 0 { - return nil, errors.New(ctx, errors.InvalidParameter, op, "mask mapping generated is zero") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "mask mapping generated is zero") } return result, nil } -func mapFromProto(ctx context.Context, ps []protoreflect.ProtoMessage) (map[string]string, error) { +func mapFromProto(ps []protoreflect.ProtoMessage) (map[string]string, error) { const op = "handlers.mapFromProto" mapping := make(map[string]string) for _, p := range ps { @@ -72,7 +71,7 @@ func mapFromProto(ctx context.Context, ps []protoreflect.ProtoMessage) (map[stri opts := f.Options().(*descriptorpb.FieldOptions) if nameMap := proto.GetExtension(opts, pb.E_MaskMapping).(*pb.MaskMapping); !proto.Equal(nameMap, &pb.MaskMapping{}) && nameMap != nil { if _, ok := mapping[nameMap.GetThis()]; ok { - return nil, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("duplicate mapping from field %q with the mapping key %q", f.Name(), nameMap.GetThis())) + return nil, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("duplicate mapping from field %q with the mapping key %q", f.Name(), nameMap.GetThis())) } mapping[nameMap.GetThis()] = nameMap.GetThat() } diff --git a/internal/daemon/controller/handlers/mask_manager_test.go b/internal/daemon/controller/handlers/mask_manager_test.go index cfa3d9c0da1..195495c2f97 100644 --- a/internal/daemon/controller/handlers/mask_manager_test.go +++ b/internal/daemon/controller/handlers/mask_manager_test.go @@ -4,7 +4,6 @@ package handlers import ( - "context" "testing" pb "github.com/hashicorp/boundary/sdk/pbs/controller/protooptions" @@ -13,7 +12,7 @@ import ( ) func TestMaskManager(t *testing.T) { - mm, err := NewMaskManager(context.Background(), MaskDestination{&pb.TestProperlyNamedFields{}}, MaskSource{&pb.TestBase{}}) + mm, err := NewMaskManager(MaskDestination{&pb.TestProperlyNamedFields{}}, MaskSource{&pb.TestBase{}}) require.NoError(t, err) assert.Equal(t, []string(nil), mm.Translate([]string{"doesnt_exist"})) assert.Equal(t, []string{"OtherFirstField"}, mm.Translate([]string{"first_field"})) @@ -29,7 +28,7 @@ func TestMaskManager(t *testing.T) { } func TestMaskManager_Split(t *testing.T) { - mm, err := NewMaskManager(context.Background(), MaskDestination{&pb.TestProperlyNamedFields{}}, MaskSource{&pb.TestBaseSplit1{}, &pb.TestBaseSplit2{}}) + mm, err := NewMaskManager(MaskDestination{&pb.TestProperlyNamedFields{}}, MaskSource{&pb.TestBaseSplit1{}, &pb.TestBaseSplit2{}}) require.NoError(t, err) assert.Equal(t, []string(nil), mm.Translate([]string{"doesnt_exist"})) assert.Equal(t, []string{"OtherFirstField"}, mm.Translate([]string{"first_field"})) @@ -40,11 +39,10 @@ func TestMaskManager_Split(t *testing.T) { } func TestMaskManager_errors(t *testing.T) { - ctx := context.Background() - _, err := NewMaskManager(ctx, MaskDestination{&pb.TestBase{}}, MaskSource{&pb.TestManyToOneMappings{}}) + _, err := NewMaskManager(MaskDestination{&pb.TestBase{}}, MaskSource{&pb.TestManyToOneMappings{}}) assert.Error(t, err) - _, err = NewMaskManager(ctx, MaskDestination{&pb.TestBase{}}, MaskSource{&pb.TestNameDoesntMap{}}) + _, err = NewMaskManager(MaskDestination{&pb.TestBase{}}, 
MaskSource{&pb.TestNameDoesntMap{}}) assert.Error(t, err) - _, err = NewMaskManager(ctx, MaskDestination{&pb.TestBase{}}, MaskSource{&pb.TestNotEnoughFields{}}) + _, err = NewMaskManager(MaskDestination{&pb.TestBase{}}, MaskSource{&pb.TestNotEnoughFields{}}) assert.Error(t, err) } diff --git a/internal/daemon/controller/handlers/roles/role_service.go b/internal/daemon/controller/handlers/roles/role_service.go index cba03b170a2..766a8e7f98f 100644 --- a/internal/daemon/controller/handlers/roles/role_service.go +++ b/internal/daemon/controller/handlers/roles/role_service.go @@ -22,7 +22,6 @@ import ( "github.com/hashicorp/boundary/internal/types/resource" "github.com/hashicorp/boundary/internal/types/scope" pb "github.com/hashicorp/boundary/sdk/pbs/controller/api/resources/roles" - "github.com/hashicorp/boundary/version" "github.com/hashicorp/go-secure-stdlib/strutil" "google.golang.org/grpc/codes" "google.golang.org/protobuf/types/known/wrapperspb" @@ -56,11 +55,7 @@ var ( func init() { var err error - if maskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.Role{}}, - handlers.MaskSource{&pb.Role{}}, - ); err != nil { + if maskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.Role{}}, handlers.MaskSource{&pb.Role{}}); err != nil { panic(err) } } @@ -75,17 +70,17 @@ type Service struct { var _ pbs.RoleServiceServer = (*Service)(nil) // NewService returns a role service which handles role related requests to boundary. -func NewService(ctx context.Context, repo common.IamRepoFactory) (Service, error) { +func NewService(repo common.IamRepoFactory) (Service, error) { const op = "roles.NewService" if repo == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing iam repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing iam repository") } return Service{repoFn: repo}, nil } // ListRoles implements the interface pbs.RoleServiceServer. 
func (s Service) ListRoles(ctx context.Context, req *pbs.ListRolesRequest) (*pbs.ListRolesResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetScopeId(), action.List) @@ -120,7 +115,7 @@ func (s Service) ListRoles(ctx context.Context, req *pbs.ListRolesRequest) (*pbs return &pbs.ListRolesResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -406,7 +401,7 @@ func (s Service) RemoveRolePrincipals(ctx context.Context, req *pbs.RemoveRolePr func (s Service) AddRoleGrants(ctx context.Context, req *pbs.AddRoleGrantsRequest) (*pbs.AddRoleGrantsResponse, error) { const op = "roles.(Service).AddRoleGrants" - if err := validateAddRoleGrantsRequest(ctx, req); err != nil { + if err := validateAddRoleGrantsRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetId(), action.AddGrants) @@ -444,7 +439,7 @@ func (s Service) AddRoleGrants(ctx context.Context, req *pbs.AddRoleGrantsReques func (s Service) SetRoleGrants(ctx context.Context, req *pbs.SetRoleGrantsRequest) (*pbs.SetRoleGrantsResponse, error) { const op = "roles.(Service).SetRoleGrants" - if err := validateSetRoleGrantsRequest(ctx, req); err != nil { + if err := validateSetRoleGrantsRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetId(), action.SetGrants) @@ -482,7 +477,7 @@ func (s Service) SetRoleGrants(ctx context.Context, req *pbs.SetRoleGrantsReques func (s Service) RemoveRoleGrants(ctx context.Context, req *pbs.RemoveRoleGrantsRequest) (*pbs.RemoveRoleGrantsResponse, error) { const op = "roles.(Service).RemoveRoleGrants" - if err := validateRemoveRoleGrantsRequest(ctx, req); err != nil { + if err := validateRemoveRoleGrantsRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetId(), action.RemoveGrants) @@ -546,7 +541,7 @@ func (s Service) createInRepo(ctx context.Context, scopeId string, item *pb.Role if item.GetGrantScopeId() != nil { opts = append(opts, iam.WithGrantScopeId(item.GetGrantScopeId().GetValue())) } - u, err := iam.NewRole(ctx, scopeId, opts...) + u, err := iam.NewRole(scopeId, opts...) if err != nil { return nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build role for creation: %v.", err) } @@ -578,7 +573,7 @@ func (s Service) updateInRepo(ctx context.Context, scopeId, id string, mask []st } version := item.GetVersion() - u, err := iam.NewRole(ctx, scopeId, opts...) + u, err := iam.NewRole(scopeId, opts...) 
if err != nil { return nil, nil, nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build role for update: %v.", err) } @@ -858,7 +853,7 @@ func toProto(ctx context.Context, in *iam.Role, principals []*iam.PrincipalRole, } if outputFields.Has(globals.GrantsField) { for _, g := range grants { - parsed, err := perms.Parse(ctx, in.GetGrantScopeId(), g.GetRawGrant()) + parsed, err := perms.Parse(in.GetGrantScopeId(), g.GetRawGrant()) if err != nil { // This should never happen as we validate on the way in, but let's // return what we can since we are still returning the raw grant @@ -874,7 +869,6 @@ func toProto(ctx context.Context, in *iam.Role, principals []*iam.PrincipalRole, Canonical: g.GetCanonicalGrant(), Json: &pb.GrantJson{ Id: parsed.Id(), - Ids: parsed.Ids(), Type: parsed.Type().String(), Actions: actions, }, @@ -949,14 +943,14 @@ func validateDeleteRequest(req *pbs.DeleteRoleRequest) error { }, req, globals.RolePrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListRolesRequest) error { +func validateListRequest(req *pbs.ListRolesRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Org.Prefix()) && !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Project.Prefix()) && req.GetScopeId() != scope.Global.String() { badFields["scope_id"] = "Improperly formatted field." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. %v", err) } if len(badFields) > 0 { @@ -979,8 +973,7 @@ func validateAddRolePrincipalsRequest(req *pbs.AddRolePrincipalsRequest) error { for _, id := range req.GetPrincipalIds() { if !handlers.ValidId(handlers.Id(id), globals.GroupPrefix) && !handlers.ValidId(handlers.Id(id), globals.UserPrefix) && - !handlers.ValidId(handlers.Id(id), globals.OidcManagedGroupPrefix) && - !handlers.ValidId(handlers.Id(id), globals.LdapManagedGroupPrefix) { + !handlers.ValidId(handlers.Id(id), globals.OidcManagedGroupPrefix) { badFields["principal_ids"] = "Must only have valid user, group, and/or managed group ids." break } @@ -1006,8 +999,7 @@ func validateSetRolePrincipalsRequest(req *pbs.SetRolePrincipalsRequest) error { for _, id := range req.GetPrincipalIds() { if !handlers.ValidId(handlers.Id(id), globals.GroupPrefix) && !handlers.ValidId(handlers.Id(id), globals.UserPrefix) && - !handlers.ValidId(handlers.Id(id), globals.OidcManagedGroupPrefix) && - !handlers.ValidId(handlers.Id(id), globals.LdapManagedGroupPrefix) { + !handlers.ValidId(handlers.Id(id), globals.OidcManagedGroupPrefix) { badFields["principal_ids"] = "Must only have valid user, group, and/or managed group ids." break } @@ -1036,8 +1028,7 @@ func validateRemoveRolePrincipalsRequest(req *pbs.RemoveRolePrincipalsRequest) e for _, id := range req.GetPrincipalIds() { if !handlers.ValidId(handlers.Id(id), globals.GroupPrefix) && !handlers.ValidId(handlers.Id(id), globals.UserPrefix) && - !handlers.ValidId(handlers.Id(id), globals.OidcManagedGroupPrefix) && - !handlers.ValidId(handlers.Id(id), globals.LdapManagedGroupPrefix) { + !handlers.ValidId(handlers.Id(id), globals.OidcManagedGroupPrefix) { badFields["principal_ids"] = "Must only have valid user, group, and/or managed group ids." 
break } @@ -1048,7 +1039,7 @@ func validateRemoveRolePrincipalsRequest(req *pbs.RemoveRolePrincipalsRequest) e return nil } -func validateAddRoleGrantsRequest(ctx context.Context, req *pbs.AddRoleGrantsRequest) error { +func validateAddRoleGrantsRequest(req *pbs.AddRoleGrantsRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetId()), globals.RolePrefix) { badFields["id"] = "Incorrectly formatted identifier." @@ -1064,7 +1055,7 @@ func validateAddRoleGrantsRequest(ctx context.Context, req *pbs.AddRoleGrantsReq badFields["grant_strings"] = "Grant strings must not be empty." break } - grant, err := perms.Parse(ctx, "p_anything", v) + grant, err := perms.Parse("p_anything", v) if err != nil { badFields["grant_strings"] = fmt.Sprintf("Improperly formatted grant %q.", v) break @@ -1075,14 +1066,6 @@ func validateAddRoleGrantsRequest(ctx context.Context, req *pbs.AddRoleGrantsReq badFields["grant_strings"] = fmt.Sprintf("Action %q has been deprecated and is not allowed to be set in grants. Use %q instead.", actStr, depAct.String()) } } - switch { - case grant.Id() == "": - // Nothing - case version.SupportsFeature(version.Binary, version.SupportIdInGrants): - // This will warn on the CLI - default: - badFields["grant_strings"] = fmt.Sprintf("Grant %q uses the %q field which is no longer supported. Please use %q instead.", v, "id", "ids") - } } if len(badFields) > 0 { return handlers.InvalidArgumentErrorf("Errors in provided fields.", badFields) @@ -1090,7 +1073,7 @@ func validateAddRoleGrantsRequest(ctx context.Context, req *pbs.AddRoleGrantsReq return nil } -func validateSetRoleGrantsRequest(ctx context.Context, req *pbs.SetRoleGrantsRequest) error { +func validateSetRoleGrantsRequest(req *pbs.SetRoleGrantsRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetId()), globals.RolePrefix) { badFields["id"] = "Incorrectly formatted identifier." @@ -1103,7 +1086,7 @@ func validateSetRoleGrantsRequest(ctx context.Context, req *pbs.SetRoleGrantsReq badFields["grant_strings"] = "Grant strings must not be empty." break } - grant, err := perms.Parse(ctx, "p_anything", v) + grant, err := perms.Parse("p_anything", v) if err != nil { badFields["grant_strings"] = fmt.Sprintf("Improperly formatted grant %q.", v) break @@ -1114,14 +1097,6 @@ func validateSetRoleGrantsRequest(ctx context.Context, req *pbs.SetRoleGrantsReq badFields["grant_strings"] = fmt.Sprintf("Action %q has been deprecated and is not allowed to be set in grants. Use %q instead.", actStr, depAct.String()) } } - switch { - case grant.Id() == "": - // Nothing - case version.SupportsFeature(version.Binary, version.SupportIdInGrants): - // This will warn on the CLI - default: - badFields["grant_strings"] = fmt.Sprintf("Grant %q uses the %q field which is no longer supported. Please use %q instead.", v, "id", "ids") - } } if len(badFields) > 0 { return handlers.InvalidArgumentErrorf("Errors in provided fields.", badFields) @@ -1129,7 +1104,7 @@ func validateSetRoleGrantsRequest(ctx context.Context, req *pbs.SetRoleGrantsReq return nil } -func validateRemoveRoleGrantsRequest(ctx context.Context, req *pbs.RemoveRoleGrantsRequest) error { +func validateRemoveRoleGrantsRequest(req *pbs.RemoveRoleGrantsRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetId()), globals.RolePrefix) { badFields["id"] = "Incorrectly formatted identifier." 
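// Illustrative sketch (hypothetical helper, not part of the change above): after this
// change the grant validators call the two-argument perms.Parse(scopeId, grantString)
// form, passing a placeholder scope id such as "p_anything" because only syntactic
// validity is being checked at this point. The helper name and error wording below are
// made up for illustration; only perms.Parse and its two-argument call shape come from
// the surrounding code.
package roles

import (
	"fmt"

	"github.com/hashicorp/boundary/internal/perms"
)

// validateGrantStrings rejects empty or unparsable grant strings before any repository
// work is attempted, mirroring the validate*RoleGrantsRequest helpers in this file.
func validateGrantStrings(grantStrings []string) error {
	for _, v := range grantStrings {
		if v == "" {
			return fmt.Errorf("grant strings must not be empty")
		}
		if _, err := perms.Parse("p_anything", v); err != nil {
			return fmt.Errorf("improperly formatted grant %q: %w", v, err)
		}
	}
	return nil
}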
@@ -1145,7 +1120,7 @@ func validateRemoveRoleGrantsRequest(ctx context.Context, req *pbs.RemoveRoleGra badFields["grant_strings"] = "Grant strings must not be empty." break } - if _, err := perms.Parse(ctx, "p_anything", v); err != nil { + if _, err := perms.Parse("p_anything", v); err != nil { badFields["grant_strings"] = fmt.Sprintf("Improperly formatted grant %q.", v) break } diff --git a/internal/daemon/controller/handlers/roles/role_service_test.go b/internal/daemon/controller/handlers/roles/role_service_test.go index 4a208422e6f..d68f25b3b97 100644 --- a/internal/daemon/controller/handlers/roles/role_service_test.go +++ b/internal/daemon/controller/handlers/roles/role_service_test.go @@ -12,7 +12,6 @@ import ( "github.com/google/go-cmp/cmp" "github.com/hashicorp/boundary/globals" - "github.com/hashicorp/boundary/internal/auth/ldap" "github.com/hashicorp/boundary/internal/auth/oidc" "github.com/hashicorp/boundary/internal/daemon/controller/auth" "github.com/hashicorp/boundary/internal/daemon/controller/handlers" @@ -25,7 +24,6 @@ import ( "github.com/hashicorp/boundary/internal/types/scope" pb "github.com/hashicorp/boundary/sdk/pbs/controller/api/resources/roles" "github.com/hashicorp/boundary/sdk/pbs/controller/api/resources/scopes" - "github.com/hashicorp/boundary/version" "github.com/kr/pretty" "google.golang.org/genproto/protobuf/field_mask" "google.golang.org/grpc/codes" @@ -177,7 +175,7 @@ func TestGet(t *testing.T) { req := proto.Clone(toMerge).(*pbs.GetRoleRequest) proto.Merge(req, tc.req) - s, err := roles.NewService(context.Background(), repoFn) + s, err := roles.NewService(repoFn) require.NoError(err, "Couldn't create new role service.") got, gErr := s.GetRole(auth.DisabledAuthTestContext(repoFn, tc.scopeId), req) @@ -300,7 +298,7 @@ func TestList(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := roles.NewService(context.Background(), repoFn) + s, err := roles.NewService(repoFn) require.NoError(err, "Couldn't create new role service.") // Test the non-anon case @@ -333,7 +331,7 @@ func TestList(t *testing.T) { func TestDelete(t *testing.T) { or, pr, repoFn := createDefaultRolesAndRepo(t) - s, err := roles.NewService(context.Background(), repoFn) + s, err := roles.NewService(repoFn) require.NoError(t, err, "Error when getting new role service.") cases := []struct { @@ -399,7 +397,7 @@ func TestDelete_twice(t *testing.T) { assert, require := assert.New(t), require.New(t) or, pr, repoFn := createDefaultRolesAndRepo(t) - s, err := roles.NewService(context.Background(), repoFn) + s, err := roles.NewService(repoFn) require.NoError(err, "Error when getting new role service") req := &pbs.DeleteRoleRequest{ Id: or.GetPublicId(), @@ -544,7 +542,7 @@ func TestCreate(t *testing.T) { req := proto.Clone(toMerge).(*pbs.CreateRoleRequest) proto.Merge(req, tc.req) - s, err := roles.NewService(context.Background(), repoFn) + s, err := roles.NewService(repoFn) require.NoError(err, "Error when getting new role service.") got, gErr := s.CreateRole(auth.DisabledAuthTestContext(repoFn, tc.req.GetItem().GetScopeId()), req) @@ -572,9 +570,8 @@ func TestCreate(t *testing.T) { } func TestUpdate(t *testing.T) { - ctx := context.Background() grantString := "id=*;type=*;actions=*" - g, err := perms.Parse(context.Background(), "global", grantString) + g, err := perms.Parse("global", grantString) require.NoError(t, err) _, actions := g.Actions() grant := &pb.Grant{ @@ -582,7 +579,6 @@ func TestUpdate(t *testing.T) { 
Canonical: g.CanonicalString(), Json: &pb.GrantJson{ Id: g.Id(), - Ids: g.Ids(), Type: g.Type().String(), Actions: actions, }, @@ -614,7 +610,7 @@ func TestUpdate(t *testing.T) { var orVersion uint32 = 1 var prVersion uint32 = 1 - tested, err := roles.NewService(ctx, repoFn) + tested, err := roles.NewService(repoFn) require.NoError(t, err, "Error when getting new role service.") resetRoles := func(proj bool) { @@ -1015,7 +1011,6 @@ func TestUpdate(t *testing.T) { } func TestAddPrincipal(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrap := db.TestWrapper(t) iamRepo := iam.TestRepo(t, conn, wrap) @@ -1023,9 +1018,10 @@ func TestAddPrincipal(t *testing.T) { return iamRepo, nil } o, p := iam.TestScopes(t, iamRepo) - s, err := roles.NewService(ctx, repoFn) + s, err := roles.NewService(repoFn) require.NoError(t, err, "Error when getting new role service.") + ctx := context.Background() kmsCache := kms.TestKms(t, conn, wrap) databaseWrapper, err := kmsCache.GetWrapper(ctx, o.PublicId, kms.KeyPurposeDatabase) require.NoError(t, err) @@ -1038,9 +1034,6 @@ func TestAddPrincipal(t *testing.T) { oidc.WithApiUrl(oidc.TestConvertToUrls(t, "https://www.alice.com/callback")[0]), ) - ldapAuthMethod := ldap.TestAuthMethod(t, conn, databaseWrapper, o.PublicId, []string{"ldaps://ldap1"}) - ldapManagedGroup := ldap.TestManagedGroup(t, conn, ldapAuthMethod, []string{"admin"}) - users := []*iam.User{ iam.TestUser(t, iamRepo, o.GetPublicId()), iam.TestUser(t, iamRepo, o.GetPublicId()), @@ -1134,14 +1127,6 @@ func TestAddPrincipal(t *testing.T) { addManagedGroups: []string{managedGroups[1].GetPublicId(), managedGroups[1].GetPublicId()}, resultManagedGroups: []string{managedGroups[0].GetPublicId(), managedGroups[1].GetPublicId()}, }, - { - name: "Add ldap managed group on populated role", - setup: func(r *iam.Role) { - iam.TestManagedGroupRole(t, conn, r.GetPublicId(), managedGroups[0].GetPublicId()) - }, - addManagedGroups: []string{ldapManagedGroup.GetPublicId()}, - resultManagedGroups: []string{managedGroups[0].GetPublicId(), ldapManagedGroup.GetPublicId()}, - }, { name: "Add invalid u_recovery on role", setup: func(r *iam.Role) {}, @@ -1228,7 +1213,7 @@ func TestSetPrincipal(t *testing.T) { repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - s, err := roles.NewService(context.Background(), repoFn) + s, err := roles.NewService(repoFn) require.NoError(t, err, "Error when getting new role service.") o, p := iam.TestScopes(t, iamRepo) @@ -1246,9 +1231,6 @@ func TestSetPrincipal(t *testing.T) { oidc.WithApiUrl(oidc.TestConvertToUrls(t, "https://www.alice.com/callback")[0]), ) - ldapAuthMethod := ldap.TestAuthMethod(t, conn, databaseWrapper, o.PublicId, []string{"ldaps://ldap1"}) - ldapManagedGroup := ldap.TestManagedGroup(t, conn, ldapAuthMethod, []string{"admin"}) - users := []*iam.User{ iam.TestUser(t, iamRepo, o.GetPublicId()), iam.TestUser(t, iamRepo, o.GetPublicId()), @@ -1335,14 +1317,6 @@ func TestSetPrincipal(t *testing.T) { setManagedGroups: []string{managedGroups[1].GetPublicId()}, resultManagedGroups: []string{managedGroups[1].GetPublicId()}, }, - { - name: "Set LDAP managed group on populated role", - setup: func(r *iam.Role) { - iam.TestManagedGroupRole(t, conn, r.GetPublicId(), managedGroups[0].GetPublicId()) - }, - setManagedGroups: []string{ldapManagedGroup.GetPublicId()}, - resultManagedGroups: []string{ldapManagedGroup.GetPublicId()}, - }, { name: "Set invalid u_recovery on role", setup: func(r *iam.Role) {}, @@ -1423,18 +1397,18 @@ func 
TestSetPrincipal(t *testing.T) { } func TestRemovePrincipal(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrap := db.TestWrapper(t) iamRepo := iam.TestRepo(t, conn, wrap) repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - s, err := roles.NewService(ctx, repoFn) + s, err := roles.NewService(repoFn) require.NoError(t, err, "Error when getting new role service.") o, p := iam.TestScopes(t, iamRepo) + ctx := context.Background() kmsCache := kms.TestKms(t, conn, wrap) databaseWrapper, err := kmsCache.GetWrapper(ctx, o.PublicId, kms.KeyPurposeDatabase) require.NoError(t, err) @@ -1447,9 +1421,6 @@ func TestRemovePrincipal(t *testing.T) { oidc.WithApiUrl(oidc.TestConvertToUrls(t, "https://www.alice.com/callback")[0]), ) - ldapAuthMethod := ldap.TestAuthMethod(t, conn, databaseWrapper, o.PublicId, []string{"ldaps://ldap1"}) - ldapManagedGroup := ldap.TestManagedGroup(t, conn, ldapAuthMethod, []string{"admin"}) - users := []*iam.User{ iam.TestUser(t, iamRepo, o.GetPublicId()), iam.TestUser(t, iamRepo, o.GetPublicId()), @@ -1583,15 +1554,6 @@ func TestRemovePrincipal(t *testing.T) { removeManagedGroups: []string{managedGroups[0].GetPublicId(), managedGroups[1].GetPublicId()}, resultManagedGroups: []string{}, }, - { - name: "Remove LDAP managed groups from role", - setup: func(r *iam.Role) { - iam.TestManagedGroupRole(t, conn, r.GetPublicId(), ldapManagedGroup.GetPublicId()) - iam.TestManagedGroupRole(t, conn, r.GetPublicId(), managedGroups[0].GetPublicId()) - }, - removeManagedGroups: []string{ldapManagedGroup.GetPublicId()}, - resultManagedGroups: []string{managedGroups[0].GetPublicId()}, - }, } for _, tc := range addCases { @@ -1670,7 +1632,7 @@ func checkEqualGrants(t *testing.T, expected []string, got *pb.Role) { require.Equal(len(expected), len(got.GrantStrings)) require.Equal(len(expected), len(got.Grants)) for i, v := range expected { - parsed, err := perms.Parse(context.Background(), "o_abc123", v) + parsed, err := perms.Parse("o_abc123", v) require.NoError(err) assert.Equal(expected[i], got.GrantStrings[i]) assert.Equal(expected[i], got.Grants[i].GetRaw()) @@ -1678,7 +1640,6 @@ func checkEqualGrants(t *testing.T, expected []string, got *pb.Role) { j := got.Grants[i].GetJson() require.NotNil(j) assert.Equal(parsed.Id(), j.GetId()) - assert.Equal(parsed.Ids(), j.GetIds()) assert.Equal(parsed.Type().String(), j.GetType()) _, acts := parsed.Actions() assert.Equal(acts, j.GetActions()) @@ -1692,7 +1653,7 @@ func TestAddGrants(t *testing.T) { repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - s, err := roles.NewService(context.Background(), repoFn) + s, err := roles.NewService(repoFn) require.NoError(t, err, "Error when getting new role service.") addCases := []struct { @@ -1705,43 +1666,34 @@ func TestAddGrants(t *testing.T) { }{ { name: "Add grant on empty role", - add: []string{"ids=*;type=*;actions=delete"}, - result: []string{"ids=*;type=*;actions=delete"}, + add: []string{"id=*;type=*;actions=delete"}, + result: []string{"id=*;type=*;actions=delete"}, }, { name: "Add grant on role with grant", existing: []string{"id=u_foo;actions=read"}, - add: []string{"ids=*;type=*;actions=delete"}, - result: []string{"id=u_foo;actions=read", "ids=*;type=*;actions=delete"}, + add: []string{"id=*;type=*;actions=delete"}, + result: []string{"id=u_foo;actions=read", "id=*;type=*;actions=delete"}, }, { name: "Add duplicate grant on role with grant", existing: []string{"id=u_fooaA1;actions=read"}, - add: 
[]string{"ids=*;type=*;actions=delete", "ids=*;type=*;actions=delete"}, - result: []string{"id=u_fooaA1;actions=read", "ids=*;type=*;actions=delete"}, + add: []string{"id=*;type=*;actions=delete", "id=*;type=*;actions=delete"}, + result: []string{"id=u_fooaA1;actions=read", "id=*;type=*;actions=delete"}, }, { name: "Add grant matching existing grant", - existing: []string{"ids=u_foo;actions=read", "ids=*;type=*;actions=delete"}, - add: []string{"ids=*;type=*;actions=delete"}, + existing: []string{"id=u_foo;actions=read", "id=*;type=*;actions=delete"}, + add: []string{"id=*;type=*;actions=delete"}, wantErr: true, }, { - name: "Check add-host-sets deprecation", + name: "Check deprecation", existing: []string{"id=u_foo;actions=read", "id=*;type=*;actions=delete"}, - add: []string{"ids=*;type=target;actions=add-host-sets"}, + add: []string{"id=*;type=target;actions=add-host-sets"}, wantErr: true, wantErrContains: "Use \\\"add-host-sources\\\" instead", }, - { - name: "Check id field deprecation", - existing: []string{"id=u_fooaA1;actions=read"}, - add: []string{"id=*;type=*;actions=delete"}, - result: []string{"id=u_fooaA1;actions=read", "id=*;type=*;actions=delete"}, - wantErr: func() bool { - return !version.SupportsFeature(version.Binary, version.SupportIdInGrants) - }(), - }, } for _, tc := range addCases { @@ -1787,7 +1739,7 @@ func TestAddGrants(t *testing.T) { name: "Bad Version", req: &pbs.AddRoleGrantsRequest{ Id: role.GetPublicId(), - GrantStrings: []string{"ids=*;type=*;actions=create"}, + GrantStrings: []string{"id=*;type=*;actions=create"}, Version: role.GetVersion() + 2, }, err: handlers.ApiErrorWithCode(codes.Internal), @@ -1796,7 +1748,7 @@ func TestAddGrants(t *testing.T) { name: "Bad Role Id", req: &pbs.AddRoleGrantsRequest{ Id: "bad id", - GrantStrings: []string{"ids=*;type=*;actions=create"}, + GrantStrings: []string{"id=*;type=*;actions=create"}, Version: role.GetVersion(), }, err: handlers.ApiErrorWithCode(codes.InvalidArgument), @@ -1805,7 +1757,7 @@ func TestAddGrants(t *testing.T) { name: "Unparseable Grant", req: &pbs.AddRoleGrantsRequest{ Id: role.GetPublicId(), - GrantStrings: []string{"ids=*;type=*;actions=create", "unparseable"}, + GrantStrings: []string{"id=*;type=*;actions=create", "unparseable"}, Version: role.GetVersion(), }, err: handlers.ApiErrorWithCode(codes.InvalidArgument), @@ -1814,7 +1766,7 @@ func TestAddGrants(t *testing.T) { name: "Empty Grant", req: &pbs.AddRoleGrantsRequest{ Id: role.GetPublicId(), - GrantStrings: []string{"ids=*;type=*;actions=create", ""}, + GrantStrings: []string{"id=*;type=*;actions=create", ""}, Version: role.GetVersion(), }, err: handlers.ApiErrorWithCode(codes.InvalidArgument), @@ -1840,7 +1792,7 @@ func TestSetGrants(t *testing.T) { return iamRepo, nil } - s, err := roles.NewService(context.Background(), repoFn) + s, err := roles.NewService(repoFn) require.NoError(t, err, "Error when getting new role service.") setCases := []struct { @@ -1853,26 +1805,26 @@ func TestSetGrants(t *testing.T) { }{ { name: "Set grant on empty role", - set: []string{"ids=*;type=*;actions=delete"}, - result: []string{"ids=*;type=*;actions=delete"}, + set: []string{"id=*;type=*;actions=delete"}, + result: []string{"id=*;type=*;actions=delete"}, }, { name: "Set grant on role with grant", existing: []string{"id=u_foo;actions=read"}, - set: []string{"ids=*;type=*;actions=delete"}, - result: []string{"ids=*;type=*;actions=delete"}, + set: []string{"id=*;type=*;actions=delete"}, + result: []string{"id=*;type=*;actions=delete"}, }, { name: "Set grant 
matching existing grant", existing: []string{"id=u_foo;actions=read", "id=*;type=*;actions=delete"}, - set: []string{"ids=*;type=*;actions=delete"}, - result: []string{"ids=*;type=*;actions=delete"}, + set: []string{"id=*;type=*;actions=delete"}, + result: []string{"id=*;type=*;actions=delete"}, }, { name: "Set duplicate grant matching existing grant", existing: []string{"id=u_foo;actions=read", "id=*;type=*;actions=delete"}, - set: []string{"ids=*;type=*;actions=delete", "ids=*;type=*;actions=delete"}, - result: []string{"ids=*;type=*;actions=delete"}, + set: []string{"id=*;type=*;actions=delete", "id=*;type=*;actions=delete"}, + result: []string{"id=*;type=*;actions=delete"}, }, { name: "Set empty on role", @@ -1881,21 +1833,12 @@ func TestSetGrants(t *testing.T) { result: nil, }, { - name: "Check add-host-sets deprecation", + name: "Check deprecation", existing: []string{"id=u_foo;actions=read", "id=*;type=*;actions=delete"}, - set: []string{"ids=*;type=target;actions=add-host-sets"}, + set: []string{"id=*;type=target;actions=add-host-sets"}, wantErr: true, wantErrContains: "Use \\\"add-host-sources\\\" instead", }, - { - name: "Check id field deprecation", - existing: []string{"id=u_fooaA1;actions=read"}, - set: []string{"id=*;type=*;actions=delete"}, - result: []string{"id=*;type=*;actions=delete"}, - wantErr: func() bool { - return !version.SupportsFeature(version.Binary, version.SupportIdInGrants) - }(), - }, } for _, tc := range setCases { @@ -1942,7 +1885,7 @@ func TestSetGrants(t *testing.T) { name: "Bad Version", req: &pbs.SetRoleGrantsRequest{ Id: role.GetPublicId(), - GrantStrings: []string{"ids=*;type=*;actions=create"}, + GrantStrings: []string{"id=*;type=*;actions=create"}, Version: role.GetVersion() + 2, }, err: handlers.ApiErrorWithCode(codes.Internal), @@ -1951,7 +1894,7 @@ func TestSetGrants(t *testing.T) { name: "Bad Role Id", req: &pbs.SetRoleGrantsRequest{ Id: "bad id", - GrantStrings: []string{"ids=*;type=*;actions=create"}, + GrantStrings: []string{"id=*;type=*;actions=create"}, Version: role.GetVersion(), }, err: handlers.ApiErrorWithCode(codes.InvalidArgument), @@ -1960,7 +1903,7 @@ func TestSetGrants(t *testing.T) { name: "Unparsable grant", req: &pbs.SetRoleGrantsRequest{ Id: role.GetPublicId(), - GrantStrings: []string{"ids=*;type=*;actions=create", "unparseable"}, + GrantStrings: []string{"id=*;type=*;actions=create", "unparseable"}, Version: role.GetVersion(), }, err: handlers.ApiErrorWithCode(codes.InvalidArgument), @@ -1985,7 +1928,7 @@ func TestRemoveGrants(t *testing.T) { repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - s, err := roles.NewService(context.Background(), repoFn) + s, err := roles.NewService(repoFn) require.NoError(t, err, "Error when getting new role service.") removeCases := []struct { diff --git a/internal/daemon/controller/handlers/scopes/scope_service.go b/internal/daemon/controller/handlers/scopes/scope_service.go index 7297df63d23..8a292b54362 100644 --- a/internal/daemon/controller/handlers/scopes/scope_service.go +++ b/internal/daemon/controller/handlers/scopes/scope_service.go @@ -105,11 +105,7 @@ var ( func init() { var err error - if maskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.Scope{}}, - handlers.MaskSource{&pb.Scope{}}, - ); err != nil { + if maskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.Scope{}}, handlers.MaskSource{&pb.Scope{}}); err != nil { panic(err) } } @@ -141,7 +137,7 @@ func (s Service) ListScopes(ctx 
context.Context, req *pbs.ListScopesRequest) (*p if req.GetScopeId() == "" { req.ScopeId = scope.Global.String() } - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetScopeId(), action.List) @@ -176,7 +172,7 @@ func (s Service) ListScopes(ctx context.Context, req *pbs.ListScopesRequest) (*p return &pbs.ListScopesResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -548,9 +544,9 @@ func (s Service) createInRepo(ctx context.Context, authResults auth.VerifyResult var err error switch parentScope.GetType() { case scope.Global.String(): - iamScope, err = iam.NewOrg(ctx, opts...) + iamScope, err = iam.NewOrg(opts...) case scope.Org.String(): - iamScope, err = iam.NewProject(ctx, parentScope.GetId(), opts...) + iamScope, err = iam.NewProject(parentScope.GetId(), opts...) } if err != nil { return nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build new scope for creation: %v.", err) @@ -603,9 +599,9 @@ func (s Service) updateInRepo(ctx context.Context, parentScope *pb.ScopeInfo, sc iamScope.Name = scopeName iamScope.PrimaryAuthMethodId = scopePrimaryAuthMethodId case parentScope.GetType() == scope.Global.String(): - iamScope, err = iam.NewOrg(ctx, opts...) + iamScope, err = iam.NewOrg(opts...) case parentScope.GetType() == scope.Org.String(): - iamScope, err = iam.NewProject(ctx, parentScope.GetId(), opts...) + iamScope, err = iam.NewProject(parentScope.GetId(), opts...) } if err != nil { return nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build scope for update: %v.", err) @@ -981,12 +977,12 @@ func validateDeleteRequest(req *pbs.DeleteScopeRequest) error { return nil } -func validateListRequest(ctx context.Context, req *pbs.ListScopesRequest) error { +func validateListRequest(req *pbs.ListScopesRequest) error { badFields := map[string]string{} if req.GetScopeId() != scope.Global.String() && !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Org.Prefix()) { badFields["scope_id"] = "Must be 'global' or a valid org scope id when listing." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. 
%v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/scopes/scope_service_test.go b/internal/daemon/controller/handlers/scopes/scope_service_test.go index c796567067d..aa0dd29744a 100644 --- a/internal/daemon/controller/handlers/scopes/scope_service_test.go +++ b/internal/daemon/controller/handlers/scopes/scope_service_test.go @@ -335,7 +335,6 @@ func TestGet(t *testing.T) { } func TestList(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrap := db.TestWrapper(t) iamRepo := iam.TestRepo(t, conn, wrap) @@ -430,9 +429,9 @@ func TestList(t *testing.T) { var wantOrgs []*pb.Scope for i := 0; i < 10; i++ { - newO, err := iam.NewOrg(ctx) + newO, err := iam.NewOrg() require.NoError(t, err) - o, err := repo.CreateScope(ctx, newO, "") + o, err := repo.CreateScope(context.Background(), newO, "") require.NoError(t, err) wantOrgs = append(wantOrgs, &pb.Scope{ Id: o.GetPublicId(), @@ -451,9 +450,9 @@ func TestList(t *testing.T) { var wantProjects []*pb.Scope for i := 0; i < 10; i++ { - newP, err := iam.NewProject(ctx, oWithProjects.GetPublicId()) + newP, err := iam.NewProject(oWithProjects.GetPublicId()) require.NoError(t, err) - p, err := repo.CreateScope(ctx, newP, "") + p, err := repo.CreateScope(context.Background(), newP, "") require.NoError(t, err) wantProjects = append(wantProjects, &pb.Scope{ Id: p.GetPublicId(), @@ -652,11 +651,11 @@ func TestCreate(t *testing.T) { repo, err := repoFn() require.NoError(t, err) - globalUser, err := iam.NewUser(ctx, scope.Global.String()) + globalUser, err := iam.NewUser(scope.Global.String()) require.NoError(t, err) globalUser, err = repo.CreateUser(ctx, globalUser) require.NoError(t, err) - orgUser, err := iam.NewUser(ctx, defaultOrg.GetPublicId()) + orgUser, err := iam.NewUser(defaultOrg.GetPublicId()) require.NoError(t, err) orgUser, err = repo.CreateUser(ctx, orgUser) require.NoError(t, err) diff --git a/internal/daemon/controller/handlers/sessions/session_list_benchmarks_test.go b/internal/daemon/controller/handlers/sessions/session_list_benchmarks_test.go index b6107820e15..6cb96e6b3df 100644 --- a/internal/daemon/controller/handlers/sessions/session_list_benchmarks_test.go +++ b/internal/daemon/controller/handlers/sessions/session_list_benchmarks_test.go @@ -84,16 +84,16 @@ func BenchmarkSessionList(b *testing.B) { err = kmsThing.AddExternalWrappers(ctx, kms.WithRootWrapper(wrap)) require.NoError(b, err) - iamRepo, err := iam.NewRepository(ctx, rw, rw, kmsThing) + iamRepo, err := iam.NewRepository(rw, rw, kmsThing) require.NoError(b, err) - authTokenRepo, err := authtoken.NewRepository(ctx, rw, rw, kmsThing) + authTokenRepo, err := authtoken.NewRepository(rw, rw, kmsThing) require.NoError(b, err) - pwRepo, err := password.NewRepository(ctx, rw, rw, kmsThing) + pwRepo, err := password.NewRepository(rw, rw, kmsThing) require.NoError(b, err) - serversRepo, err := server.NewRepository(ctx, rw, rw, kmsThing) + serversRepo, err := server.NewRepository(rw, rw, kmsThing) require.NoError(b, err) iamRepoFn := func() (*iam.Repository, error) { @@ -109,7 +109,7 @@ func BenchmarkSessionList(b *testing.B) { return serversRepo, nil } - s, err := sessions.NewService(ctx, sessRepoFn, iamRepoFn) + s, err := sessions.NewService(sessRepoFn, iamRepoFn) require.NoError(b, err) var users []*userWithToken diff --git a/internal/daemon/controller/handlers/sessions/session_service.go b/internal/daemon/controller/handlers/sessions/session_service.go index acc9de58da6..50f48b429ed 100644 --- 
a/internal/daemon/controller/handlers/sessions/session_service.go +++ b/internal/daemon/controller/handlers/sessions/session_service.go @@ -55,13 +55,13 @@ type Service struct { var _ pbs.SessionServiceServer = (*Service)(nil) // NewService returns a session service which handles session related requests to boundary. -func NewService(ctx context.Context, repoFn session.RepositoryFactory, iamRepoFn common.IamRepoFactory) (Service, error) { +func NewService(repoFn session.RepositoryFactory, iamRepoFn common.IamRepoFactory) (Service, error) { const op = "sessions.NewService" if repoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing session repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing session repository") } if iamRepoFn == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing iam repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing iam repository") } return Service{repoFn: repoFn, iamRepoFn: iamRepoFn}, nil } @@ -124,7 +124,7 @@ func (s Service) GetSession(ctx context.Context, req *pbs.GetSessionRequest) (*p func (s Service) ListSessions(ctx context.Context, req *pbs.ListSessionsRequest) (*pbs.ListSessionsResponse, error) { const op = "session.(Service).ListSessions" - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } @@ -169,7 +169,7 @@ func (s Service) ListSessions(ctx context.Context, req *pbs.ListSessionsRequest) return &pbs.ListSessionsResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -470,13 +470,13 @@ func validateGetRequest(req *pbs.GetSessionRequest) error { return handlers.ValidateGetRequest(handlers.NoopValidatorFn, req, globals.SessionPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListSessionsRequest) error { +func validateListRequest(req *pbs.ListSessionsRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Project.Prefix()) && !req.GetRecursive() { badFields["scope_id"] = "This field must be a valid project scope ID or the list operation must be recursive." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. %v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/sessions/session_service_test.go b/internal/daemon/controller/handlers/sessions/session_service_test.go index cc49fd9710d..0fdf63f16c4 100644 --- a/internal/daemon/controller/handlers/sessions/session_service_test.go +++ b/internal/daemon/controller/handlers/sessions/session_service_test.go @@ -56,10 +56,10 @@ func TestGetSession(t *testing.T) { return session.NewRepository(ctx, rw, rw, kms, opt...) 
} tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } o, p := iam.TestScopes(t, iamRepo) @@ -138,7 +138,7 @@ func TestGetSession(t *testing.T) { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := sessions.NewService(ctx, sessRepoFn, iamRepoFn) + s, err := sessions.NewService(sessRepoFn, iamRepoFn) require.NoError(err, "Couldn't create new session service.") requestInfo := authpb.RequestInfo{ @@ -179,10 +179,10 @@ func TestList_Self(t *testing.T) { return session.NewRepository(ctx, rw, rw, kms, opt...) } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } o, pWithSessions := iam.TestScopes(t, iamRepo) @@ -225,7 +225,7 @@ func TestList_Self(t *testing.T) { Endpoint: "tcp://127.0.0.1:22", }) - s, err := sessions.NewService(ctx, sessRepoFn, iamRepoFn) + s, err := sessions.NewService(sessRepoFn, iamRepoFn) require.NoError(t, err, "Couldn't create new session service.") cases := []struct { @@ -288,10 +288,10 @@ func TestList(t *testing.T) { return session.NewRepository(ctx, rw, rw, kms, opt...) } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } _, pNoSessions := iam.TestScopes(t, iamRepo) @@ -518,7 +518,7 @@ func TestList(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { require, assert := require.New(t), assert.New(t) - s, err := sessions.NewService(ctx, sessRepoFn, iamRepoFn) + s, err := sessions.NewService(sessRepoFn, iamRepoFn) require.NoError(err, "Couldn't create new session service.") // Test without anon user @@ -624,10 +624,10 @@ func TestCancel(t *testing.T) { return session.NewRepository(ctx, rw, rw, kms, opt...) 
} tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } o, p := iam.TestScopes(t, iamRepo) @@ -712,7 +712,7 @@ func TestCancel(t *testing.T) { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := sessions.NewService(ctx, sessRepoFn, iamRepoFn) + s, err := sessions.NewService(sessRepoFn, iamRepoFn) require.NoError(err, "Couldn't create new session service.") tc.req.Version = version diff --git a/internal/daemon/controller/handlers/targets/target_service.go b/internal/daemon/controller/handlers/targets/target_service.go index 477470a3e04..f0a34421c92 100644 --- a/internal/daemon/controller/handlers/targets/target_service.go +++ b/internal/daemon/controller/handlers/targets/target_service.go @@ -191,7 +191,7 @@ func NewService( func (s Service) ListTargets(ctx context.Context, req *pbs.ListTargetsRequest) (*pbs.ListTargetsResponse, error) { const op = "targets.(Service).ListSessions" - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetScopeId(), action.List) @@ -233,7 +233,7 @@ func (s Service) ListTargets(ctx context.Context, req *pbs.ListTargetsRequest) ( return &pbs.ListTargetsResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -947,7 +947,7 @@ func (s Service) AuthorizeSession(ctx context.Context, req *pbs.AuthorizeSession if protoWorker != nil { sessionComposition.ProtocolWorkerId = protoWorker.GetPublicId() } - sess, err := session.New(ctx, sessionComposition) + sess, err := session.New(sessionComposition) if err != nil { return nil, err } @@ -1819,13 +1819,13 @@ func validateDeleteRequest(req *pbs.DeleteTargetRequest) error { return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, target.Prefixes()...) } -func validateListRequest(ctx context.Context, req *pbs.ListTargetsRequest) error { +func validateListRequest(req *pbs.ListTargetsRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Project.Prefix()) && !req.GetRecursive() { badFields[globals.ScopeIdField] = "This field must be a valid project scope ID or the list operation must be recursive." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. 
%v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/targets/tcp/target_service_test.go b/internal/daemon/controller/handlers/targets/tcp/target_service_test.go index 8abdbe0a041..dd4662206ad 100644 --- a/internal/daemon/controller/handlers/targets/tcp/target_service_test.go +++ b/internal/daemon/controller/handlers/targets/tcp/target_service_test.go @@ -96,19 +96,19 @@ func testService(t *testing.T, ctx context.Context, conn *db.DB, kms *kms.Kms, w return iam.TestRepo(t, conn, wrapper), nil } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } sessionRepoFn := func(opts ...session.Option) (*session.Repository, error) { return session.NewRepository(ctx, rw, rw, kms, opts...) } staticHostRepoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } vaultCredRepoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } staticCredRepoFn := func() (*credstatic.Repository, error) { return credstatic.NewRepository(context.Background(), rw, rw, kms) @@ -118,7 +118,6 @@ func testService(t *testing.T, ctx context.Context, conn *db.DB, kms *kms.Kms, w func TestGet(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -130,10 +129,10 @@ func TestGet(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } o, proj := iam.TestScopes(t, iamRepo) @@ -145,6 +144,7 @@ func TestGet(t *testing.T) { hc := static.TestCatalogs(t, conn, proj.GetPublicId(), 1)[0] hs := static.TestSets(t, conn, hc.GetPublicId(), 2) + ctx := context.Background() tar := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test", target.WithHostSources([]string{hs[0].GetPublicId(), hs[1].GetPublicId()})) tarAddr := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test address", target.WithAddress("8.8.8.8")) @@ -222,7 +222,7 @@ func TestGet(t *testing.T) { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := testService(t, ctx, conn, kms, wrapper) + s, err := testService(t, context.Background(), conn, kms, wrapper) require.NoError(err, "Couldn't create a new host set service.") requestInfo := authpb.RequestInfo{ @@ -247,7 +247,6 @@ func TestGet(t *testing.T) { } func TestList(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -259,10 +258,10 @@ func TestList(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return 
server.NewRepository(rw, rw, kms) } _, projNoTar := iam.TestScopes(t, iamRepo) @@ -284,6 +283,7 @@ func TestList(t *testing.T) { otherHc := static.TestCatalogs(t, conn, otherProj.GetPublicId(), 1)[0] hss := static.TestSets(t, conn, hc.GetPublicId(), 2) otherHss := static.TestSets(t, conn, otherHc.GetPublicId(), 2) + ctx := context.Background() var wantTars []*pb.Target var totalTars []*pb.Target @@ -414,7 +414,6 @@ func TestList(t *testing.T) { func TestDelete(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -426,12 +425,13 @@ func TestDelete(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } + ctx := context.Background() org, proj := iam.TestScopes(t, iamRepo) at := authtoken.TestAuthToken(t, conn, kms, org.GetPublicId()) r := iam.TestRole(t, conn, proj.GetPublicId()) @@ -440,7 +440,7 @@ func TestDelete(t *testing.T) { tar := tcp.TestTarget(ctx, t, conn, proj.GetPublicId(), "test") - s, err := testService(t, ctx, conn, kms, wrapper) + s, err := testService(t, context.Background(), conn, kms, wrapper) require.NoError(t, err, "Couldn't create a new target service.") cases := []struct { @@ -482,7 +482,7 @@ func TestDelete(t *testing.T) { PublicId: at.GetPublicId(), Token: at.GetToken(), } - requestContext := context.WithValue(ctx, requests.ContextRequestInformationKey, &requests.RequestContext{}) + requestContext := context.WithValue(context.Background(), requests.ContextRequestInformationKey, &requests.RequestContext{}) ctx := auth.NewVerifierContext(requestContext, iamRepoFn, tokenRepoFn, serversRepoFn, kms, &requestInfo) got, gErr := s.DeleteTarget(ctx, tc.req) if tc.err != nil { @@ -509,10 +509,10 @@ func TestDelete_twice(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -544,7 +544,6 @@ func TestDelete_twice(t *testing.T) { func TestCreate(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -556,10 +555,10 @@ func TestCreate(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -780,7 +779,6 @@ func TestCreate(t *testing.T) { func TestUpdate(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -791,10 +789,10 @@ func TestUpdate(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } 
serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -803,6 +801,7 @@ func TestUpdate(t *testing.T) { _ = iam.TestUserRole(t, conn, r.GetPublicId(), at.GetIamUserId()) _ = iam.TestRoleGrant(t, conn, r.GetPublicId(), "id=*;type=*;actions=*") + ctx := context.Background() repoFn := func(o ...target.Option) (*target.Repository, error) { return target.NewRepository(ctx, rw, rw, kms) } @@ -1275,7 +1274,6 @@ func TestUpdate(t *testing.T) { func TestUpdate_BadVersion(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1287,10 +1285,10 @@ func TestUpdate_BadVersion(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -1299,6 +1297,7 @@ func TestUpdate_BadVersion(t *testing.T) { _ = iam.TestUserRole(t, conn, r.GetPublicId(), at.GetIamUserId()) _ = iam.TestRoleGrant(t, conn, r.GetPublicId(), "id=*;type=*;actions=*") + ctx := context.Background() repoFn := func(o ...target.Option) (*target.Repository, error) { return target.NewRepository(ctx, rw, rw, kms) } @@ -1337,7 +1336,6 @@ func TestUpdate_BadVersion(t *testing.T) { } func TestAddTargetHostSources(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1350,10 +1348,10 @@ func TestAddTargetHostSources(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -1374,6 +1372,7 @@ func TestAddTargetHostSources(t *testing.T) { plg.GetPublicId(): loopback.NewWrappingPluginHostClient(&loopback.TestPluginServer{}), }) + ctx := context.Background() addCases := []struct { name string tar target.Target @@ -1499,7 +1498,6 @@ func TestAddTargetHostSources(t *testing.T) { } func TestSetTargetHostSources(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1512,10 +1510,10 @@ func TestSetTargetHostSources(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -1536,6 +1534,7 @@ func TestSetTargetHostSources(t *testing.T) { plg.GetPublicId(): loopback.NewWrappingPluginHostClient(&loopback.TestPluginServer{}), }) + ctx := context.Background() setCases := []struct { name string tar target.Target @@ -1649,7 +1648,6 @@ func TestSetTargetHostSources(t *testing.T) { } func TestRemoveTargetHostSources(t *testing.T) { - ctx := context.Background() conn, _ := 
db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1662,10 +1660,10 @@ func TestRemoveTargetHostSources(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -1686,6 +1684,7 @@ func TestRemoveTargetHostSources(t *testing.T) { plg.GetPublicId(): loopback.NewWrappingPluginHostClient(&loopback.TestPluginServer{}), }) + ctx := context.Background() removeCases := []struct { name string tar target.Target @@ -1817,7 +1816,6 @@ func TestRemoveTargetHostSources(t *testing.T) { } func TestAddTargetCredentialSources(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -1829,10 +1827,10 @@ func TestAddTargetCredentialSources(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -1850,6 +1848,7 @@ func TestAddTargetCredentialSources(t *testing.T) { storeStatic := credstatic.TestCredentialStore(t, conn, wrapper, proj.GetPublicId()) creds := credstatic.TestUsernamePasswordCredentials(t, conn, wrapper, "user", "pass", storeStatic.GetPublicId(), proj.GetPublicId(), 2) + ctx := context.Background() addCases := []struct { name string tar target.Target @@ -1999,7 +1998,6 @@ func TestAddTargetCredentialSources(t *testing.T) { } func TestSetTargetCredentialSources(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -2011,10 +2009,10 @@ func TestSetTargetCredentialSources(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -2023,7 +2021,7 @@ func TestSetTargetCredentialSources(t *testing.T) { _ = iam.TestUserRole(t, conn, r.GetPublicId(), at.GetIamUserId()) _ = iam.TestRoleGrant(t, conn, r.GetPublicId(), "id=*;type=*;actions=*") - s, err := testService(t, ctx, conn, kms, wrapper) + s, err := testService(t, context.Background(), conn, kms, wrapper) require.NoError(t, err, "Error when getting new target service.") storeVault := vault.TestCredentialStores(t, conn, wrapper, proj.GetPublicId(), 1)[0] @@ -2032,6 +2030,7 @@ func TestSetTargetCredentialSources(t *testing.T) { storeStatic := credstatic.TestCredentialStore(t, conn, wrapper, proj.GetPublicId()) creds := credstatic.TestUsernamePasswordCredentials(t, conn, wrapper, "user", "pass", storeStatic.GetPublicId(), proj.GetPublicId(), 2) + ctx := context.Background() setCases := []struct { name string tar target.Target @@ -2108,7 +2107,7 @@ func TestSetTargetCredentialSources(t *testing.T) { PublicId: at.GetPublicId(), Token: at.GetToken(), } - 
requestContext := context.WithValue(ctx, requests.ContextRequestInformationKey, &requests.RequestContext{}) + requestContext := context.WithValue(context.Background(), requests.ContextRequestInformationKey, &requests.RequestContext{}) ctx := auth.NewVerifierContext(requestContext, iamRepoFn, tokenRepoFn, serversRepoFn, kms, &requestInfo) got, err := s.SetTargetCredentialSources(ctx, req) require.NoError(t, err, "Got error: %v", s) @@ -2175,7 +2174,6 @@ func TestSetTargetCredentialSources(t *testing.T) { } func TestRemoveTargetCredentialSources(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) @@ -2187,10 +2185,10 @@ func TestRemoveTargetCredentialSources(t *testing.T) { return iamRepo, nil } tokenRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -2199,7 +2197,7 @@ func TestRemoveTargetCredentialSources(t *testing.T) { _ = iam.TestUserRole(t, conn, r.GetPublicId(), at.GetIamUserId()) _ = iam.TestRoleGrant(t, conn, r.GetPublicId(), "id=*;type=*;actions=*") - s, err := testService(t, ctx, conn, kms, wrapper) + s, err := testService(t, context.Background(), conn, kms, wrapper) require.NoError(t, err, "Error when getting new target service.") csVault := vault.TestCredentialStores(t, conn, wrapper, proj.GetPublicId(), 1)[0] @@ -2208,6 +2206,7 @@ func TestRemoveTargetCredentialSources(t *testing.T) { csStatic := credstatic.TestCredentialStores(t, conn, wrapper, proj.GetPublicId(), 1)[0] creds := credstatic.TestUsernamePasswordCredentials(t, conn, wrapper, "u", "p", csStatic.GetPublicId(), proj.GetPublicId(), 2) + ctx := context.Background() removeCases := []struct { name string tar target.Target @@ -2427,27 +2426,27 @@ func TestAuthorizeSession(t *testing.T) { return iamRepo, nil } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } sessionRepoFn := func(opts ...session.Option) (*session.Repository, error) { return session.NewRepository(ctx, rw, rw, kms, opts...) 
} - staticRepo, err := static.NewRepository(ctx, rw, rw, kms) + staticRepo, err := static.NewRepository(rw, rw, kms) require.NoError(t, err) staticHostRepoFn := func() (*static.Repository, error) { return staticRepo, nil } vaultCredRepoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } staticCredRepoFn := func() (*credstatic.Repository, error) { return credstatic.NewRepository(ctx, rw, rw, kms) } atRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } passwordAuthRepoFn := func() (*password.Repository, error) { - return password.NewRepository(ctx, rw, rw, kms) + return password.NewRepository(rw, rw, kms) } oidcAuthRepoFn := func() (*oidc.Repository, error) { return oidc.NewRepository(ctx, rw, rw, kms) @@ -2482,7 +2481,7 @@ func TestAuthorizeSession(t *testing.T) { }), } pluginHostRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, plgm) + return hostplugin.NewRepository(rw, rw, kms, sche, plgm) } loginName := "foo@bar.com" @@ -2539,7 +2538,7 @@ func TestAuthorizeSession(t *testing.T) { sec, tok := v.CreateToken(t, vault.WithPolicies([]string{"default", "boundary-controller", "pki"})) vaultStore := vault.TestCredentialStore(t, conn, wrapper, proj.GetPublicId(), v.Addr, tok, sec.Auth.Accessor) - credService, err := credentiallibraries.NewService(ctx, vaultCredRepoFn, iamRepoFn) + credService, err := credentiallibraries.NewService(vaultCredRepoFn, iamRepoFn) require.NoError(t, err) clsResp, err := credService.CreateCredentialLibrary(ctx, &pbs.CreateCredentialLibraryRequest{Item: &credlibpb.CredentialLibrary{ CredentialStoreId: vaultStore.GetPublicId(), @@ -2724,25 +2723,25 @@ func TestAuthorizeSessionTypedCredentials(t *testing.T) { return iamRepo, nil } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } sessionRepoFn := func(opts ...session.Option) (*session.Repository, error) { return session.NewRepository(ctx, rw, rw, kms, opts...) 
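Annotation: throughout these tests, repositories reach the services as factory closures (func() (*Repository, error)) built over a shared reader, writer, and KMS, rather than as concrete instances. A rough, self-contained sketch of that shape; the reader, writer, and keyManager types are placeholders for Boundary's db and kms packages.

package main

import (
    "errors"
    "fmt"
)

// Hypothetical stand-ins for db.Reader, db.Writer, and *kms.Kms.
type reader struct{}
type writer struct{}
type keyManager struct{}

// repository mirrors the general shape of the server/authtoken/static repositories.
type repository struct {
    r *reader
    w *writer
    k *keyManager
}

func newRepository(r *reader, w *writer, k *keyManager) (*repository, error) {
    if r == nil || w == nil || k == nil {
        return nil, errors.New("missing repository dependency")
    }
    return &repository{r: r, w: w, k: k}, nil
}

// repoFactory is how handler services receive repositories:
// a closure that builds a repository on demand.
type repoFactory func() (*repository, error)

func main() {
    r, w, k := &reader{}, &writer{}, &keyManager{}
    var serversRepoFn repoFactory = func() (*repository, error) {
        return newRepository(r, w, k)
    }
    repo, err := serversRepoFn()
    fmt.Println(repo != nil, err)
}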
} staticHostRepoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } vaultCredRepoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } staticCredRepoFn := func() (*credstatic.Repository, error) { return credstatic.NewRepository(ctx, rw, rw, kms) } atRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } pluginHostRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } org, proj := iam.TestScopes(t, iamRepo) @@ -2777,7 +2776,7 @@ func TestAuthorizeSessionTypedCredentials(t *testing.T) { sec, tok := v.CreateToken(t, vault.WithPolicies([]string{"default", "boundary-controller", "secret"})) vaultStore := vault.TestCredentialStore(t, conn, wrapper, proj.GetPublicId(), v.Addr, tok, sec.Auth.Accessor) - credLibService, err := credentiallibraries.NewService(ctx, vaultCredRepoFn, iamRepoFn) + credLibService, err := credentiallibraries.NewService(vaultCredRepoFn, iamRepoFn) require.NoError(t, err) // Create secret in vault with default username and password fields @@ -2837,7 +2836,7 @@ func TestAuthorizeSessionTypedCredentials(t *testing.T) { require.NoError(t, err) staticStore := credstatic.TestCredentialStore(t, conn, wrapper, proj.GetPublicId()) - credService, err := credentials.NewService(ctx, staticCredRepoFn, iamRepoFn) + credService, err := credentials.NewService(staticCredRepoFn, iamRepoFn) require.NoError(t, err) upCredResp, err := credService.CreateCredential(ctx, &pbs.CreateCredentialRequest{Item: &credpb.Credential{ CredentialStoreId: staticStore.GetPublicId(), @@ -3313,25 +3312,25 @@ func TestAuthorizeSession_Errors(t *testing.T) { return iamRepo, nil } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } sessionRepoFn := func(opts ...session.Option) (*session.Repository, error) { return session.NewRepository(ctx, rw, rw, kms, opts...) 
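Annotation: the recurring change in these hunks is that repository constructors lose their leading context.Context parameter, so construction-time validation can no longer use the context-aware errors.New. A hedged sketch of the resulting constructor shape, using placeholder types rather than Boundary's real ones.

package main

import "fmt"

type dbReader struct{}
type dbWriter struct{}
type kmsCache struct{}

type staticRepository struct {
    reader *dbReader
    writer *dbWriter
    kms    *kmsCache
}

// newStaticRepository mirrors the post-change form, e.g. static.NewRepository(rw, rw, kms):
// no ctx argument, with each dependency checked up front.
func newStaticRepository(r *dbReader, w *dbWriter, k *kmsCache) (*staticRepository, error) {
    const op = "static.NewRepository"
    switch {
    case r == nil:
        return nil, fmt.Errorf("%s: nil db.Reader", op)
    case w == nil:
        return nil, fmt.Errorf("%s: nil db.Writer", op)
    case k == nil:
        return nil, fmt.Errorf("%s: nil kms", op)
    }
    return &staticRepository{reader: r, writer: w, kms: k}, nil
}

func main() {
    repo, err := newStaticRepository(&dbReader{}, &dbWriter{}, &kmsCache{})
    fmt.Println(repo != nil, err)
}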
} staticHostRepoFn := func() (*static.Repository, error) { - return static.NewRepository(ctx, rw, rw, kms) + return static.NewRepository(rw, rw, kms) } pluginHostRepoFn := func() (*hostplugin.Repository, error) { - return hostplugin.NewRepository(ctx, rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) + return hostplugin.NewRepository(rw, rw, kms, sche, map[string]plgpb.HostPluginServiceClient{}) } vaultCredRepoFn := func() (*vault.Repository, error) { - return vault.NewRepository(ctx, rw, rw, kms, sche) + return vault.NewRepository(rw, rw, kms, sche) } staticCredRepoFn := func() (*credstatic.Repository, error) { return credstatic.NewRepository(ctx, rw, rw, kms) } atRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kms) + return authtoken.NewRepository(rw, rw, kms) } org, proj := iam.TestScopes(t, iamRepo) @@ -3424,7 +3423,7 @@ func TestAuthorizeSession_Errors(t *testing.T) { } libraryExists := func(tar target.Target) (version uint32) { - credService, err := credentiallibraries.NewService(ctx, vaultCredRepoFn, iamRepoFn) + credService, err := credentiallibraries.NewService(vaultCredRepoFn, iamRepoFn) require.NoError(t, err) clsResp, err := credService.CreateCredentialLibrary(ctx, &pbs.CreateCredentialLibraryRequest{Item: &credlibpb.CredentialLibrary{ CredentialStoreId: store.GetPublicId(), @@ -3449,7 +3448,7 @@ func TestAuthorizeSession_Errors(t *testing.T) { } misConfiguredlibraryExists := func(tar target.Target) (version uint32) { - credService, err := credentiallibraries.NewService(ctx, vaultCredRepoFn, iamRepoFn) + credService, err := credentiallibraries.NewService(vaultCredRepoFn, iamRepoFn) require.NoError(t, err) clsResp, err := credService.CreateCredentialLibrary(ctx, &pbs.CreateCredentialLibraryRequest{Item: &credlibpb.CredentialLibrary{ CredentialStoreId: store.GetPublicId(), @@ -3474,7 +3473,7 @@ func TestAuthorizeSession_Errors(t *testing.T) { } expiredTokenLibrary := func(tar target.Target) (version uint32) { - credService, err := credentiallibraries.NewService(ctx, vaultCredRepoFn, iamRepoFn) + credService, err := credentiallibraries.NewService(vaultCredRepoFn, iamRepoFn) require.NoError(t, err) clsResp, err := credService.CreateCredentialLibrary(ctx, &pbs.CreateCredentialLibraryRequest{Item: &credlibpb.CredentialLibrary{ CredentialStoreId: expiredStore.GetPublicId(), diff --git a/internal/daemon/controller/handlers/targets/tcp/tcp.go b/internal/daemon/controller/handlers/targets/tcp/tcp.go index 67dc086c779..b8f7e2be6d9 100644 --- a/internal/daemon/controller/handlers/targets/tcp/tcp.go +++ b/internal/daemon/controller/handlers/targets/tcp/tcp.go @@ -122,7 +122,6 @@ func init() { var err error if maskManager, err = handlers.NewMaskManager( - context.Background(), handlers.MaskDestination{&tcpStore.Target{}, &store.TargetAddress{}}, handlers.MaskSource{&pb.Target{}, &pb.TcpTargetAttributes{}}, ); err != nil { diff --git a/internal/daemon/controller/handlers/users/user_service.go b/internal/daemon/controller/handlers/users/user_service.go index ad3c4244607..f27730af0ec 100644 --- a/internal/daemon/controller/handlers/users/user_service.go +++ b/internal/daemon/controller/handlers/users/user_service.go @@ -52,11 +52,7 @@ var ( func init() { var err error - if maskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.User{}}, - handlers.MaskSource{&pb.User{}}, - ); err != nil { + if maskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.User{}}, 
handlers.MaskSource{&pb.User{}}); err != nil { panic(err) } } @@ -71,17 +67,17 @@ type Service struct { var _ pbs.UserServiceServer = (*Service)(nil) // NewService returns a user service which handles user related requests to boundary. -func NewService(ctx context.Context, repo common.IamRepoFactory) (Service, error) { +func NewService(repo common.IamRepoFactory) (Service, error) { const op = "users.NewService" if repo == nil { - return Service{}, errors.New(ctx, errors.InvalidParameter, op, "missing iam repository") + return Service{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing iam repository") } return Service{repoFn: repo}, nil } // ListUsers implements the interface pbs.UserServiceServer. func (s Service) ListUsers(ctx context.Context, req *pbs.ListUsersRequest) (*pbs.ListUsersResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetScopeId(), action.List) @@ -116,7 +112,7 @@ func (s Service) ListUsers(ctx context.Context, req *pbs.ListUsersRequest) (*pbs return &pbs.ListUsersResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -424,7 +420,7 @@ func (s Service) createInRepo(ctx context.Context, orgId string, item *pb.User) if item.GetDescription() != nil { opts = append(opts, iam.WithDescription(item.GetDescription().GetValue())) } - u, err := iam.NewUser(ctx, orgId, opts...) + u, err := iam.NewUser(orgId, opts...) if err != nil { return nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build user for creation: %v.", err) } @@ -452,7 +448,7 @@ func (s Service) updateInRepo(ctx context.Context, orgId, id string, mask []stri opts = append(opts, iam.WithName(name.GetValue())) } version := item.GetVersion() - u, err := iam.NewUser(ctx, orgId, opts...) + u, err := iam.NewUser(orgId, opts...) if err != nil { return nil, nil, handlers.ApiErrorWithCodeAndMessage(codes.Internal, "Unable to build user for update: %v.", err) } @@ -692,13 +688,13 @@ func validateDeleteRequest(req *pbs.DeleteUserRequest) error { return handlers.ValidateDeleteRequest(handlers.NoopValidatorFn, req, globals.UserPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListUsersRequest) error { +func validateListRequest(req *pbs.ListUsersRequest) error { badFields := map[string]string{} if !handlers.ValidId(handlers.Id(req.GetScopeId()), scope.Org.Prefix()) && req.GetScopeId() != scope.Global.String() { badFields["scope_id"] = "Must be 'global' or a valid org scope id when listing." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. 
%v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/users/user_service_test.go b/internal/daemon/controller/handlers/users/user_service_test.go index 8c3e267ccdc..9656d29890e 100644 --- a/internal/daemon/controller/handlers/users/user_service_test.go +++ b/internal/daemon/controller/handlers/users/user_service_test.go @@ -144,7 +144,7 @@ func TestGet(t *testing.T) { req := proto.Clone(toMerge).(*pbs.GetUserRequest) proto.Merge(req, tc.req) - s, err := users.NewService(context.Background(), repoFn) + s, err := users.NewService(repoFn) require.NoError(err, "Couldn't create new user service.") got, gErr := s.GetUser(auth.DisabledAuthTestContext(repoFn, u.GetScopeId()), req) @@ -183,7 +183,7 @@ func TestList(t *testing.T) { secondaryAm := password.TestAuthMethods(t, conn, oWithUsers.PublicId, 1) require.Len(t, secondaryAm, 1) - s, err := users.NewService(context.Background(), repoFn) + s, err := users.NewService(repoFn) require.NoError(t, err) var wantUsers []*pb.User @@ -203,7 +203,7 @@ func TestList(t *testing.T) { // Add new users for i := 0; i < 10; i++ { - newU, err := iam.NewUser(ctx, oWithUsers.GetPublicId()) + newU, err := iam.NewUser(oWithUsers.GetPublicId()) require.NoError(t, err) u, err := repo.CreateUser(context.Background(), newU) require.NoError(t, err) @@ -332,7 +332,7 @@ func (s sortableUsers) Swap(i, j int) { s.users[i], s.users[j] = s.users[j] func TestDelete(t *testing.T) { u, _, repoFn := createDefaultUserAndRepo(t, false) - s, err := users.NewService(context.Background(), repoFn) + s, err := users.NewService(repoFn) require.NoError(t, err, "Error when getting new user service.") cases := []struct { @@ -379,7 +379,7 @@ func TestDelete_twice(t *testing.T) { assert, require := assert.New(t), require.New(t) u, _, repoFn := createDefaultUserAndRepo(t, false) - s, err := users.NewService(context.Background(), repoFn) + s, err := users.NewService(repoFn) require.NoError(err, "Error when getting new user service") req := &pbs.DeleteUserRequest{ Id: u.GetPublicId(), @@ -471,7 +471,7 @@ func TestCreate(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := users.NewService(context.Background(), repoFn) + s, err := users.NewService(repoFn) require.NoError(err, "Error when getting new user service.") got, gErr := s.CreateUser(auth.DisabledAuthTestContext(repoFn, tc.req.GetItem().GetScopeId()), tc.req) @@ -500,7 +500,7 @@ func TestCreate(t *testing.T) { func TestUpdate(t *testing.T) { u, _, repoFn := createDefaultUserAndRepo(t, false) - tested, err := users.NewService(context.Background(), repoFn) + tested, err := users.NewService(repoFn) require.NoError(t, err, "Error when getting new user service.") created := u.GetCreateTime().GetTimestamp().AsTime() @@ -777,7 +777,7 @@ func TestAddAccount(t *testing.T) { repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - s, err := users.NewService(ctx, repoFn) + s, err := users.NewService(repoFn) require.NoError(t, err, "Error when getting new user service.") o, _ := iam.TestScopes(t, iamRepo) @@ -937,7 +937,7 @@ func TestSetAccount(t *testing.T) { repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - s, err := users.NewService(ctx, repoFn) + s, err := users.NewService(repoFn) require.NoError(t, err, "Error when getting new user service.") o, _ := iam.TestScopes(t, iamRepo) @@ -1099,7 +1099,7 @@ func TestRemoveAccount(t *testing.T) { repoFn := func() (*iam.Repository, error) { return iamRepo, nil } - 
s, err := users.NewService(ctx, repoFn) + s, err := users.NewService(repoFn) require.NoError(t, err, "Error when getting new user service.") o, _ := iam.TestScopes(t, iamRepo) diff --git a/internal/daemon/controller/handlers/workers/worker_service.go b/internal/daemon/controller/handlers/workers/worker_service.go index b2d9218c44d..09d3eddc29b 100644 --- a/internal/daemon/controller/handlers/workers/worker_service.go +++ b/internal/daemon/controller/handlers/workers/worker_service.go @@ -73,11 +73,7 @@ var ( func init() { var err error - if maskManager, err = handlers.NewMaskManager( - context.Background(), - handlers.MaskDestination{&store.Worker{}}, - handlers.MaskSource{&pb.Worker{}}, - ); err != nil { + if maskManager, err = handlers.NewMaskManager(handlers.MaskDestination{&store.Worker{}}, handlers.MaskSource{&pb.Worker{}}); err != nil { panic(err) } } @@ -117,7 +113,7 @@ func NewService(ctx context.Context, repo common.ServersRepoFactory, iamRepoFn c // ListWorkers implements the interface pbs.WorkerServiceServer. func (s Service) ListWorkers(ctx context.Context, req *pbs.ListWorkersRequest) (*pbs.ListWorkersResponse, error) { - if err := validateListRequest(ctx, req); err != nil { + if err := validateListRequest(req); err != nil { return nil, err } authResults := s.authResult(ctx, req.GetScopeId(), action.List) @@ -152,7 +148,7 @@ func (s Service) ListWorkers(ctx context.Context, req *pbs.ListWorkersRequest) ( return &pbs.ListWorkersResponse{}, nil } - filter, err := handlers.NewFilter(ctx, req.GetFilter()) + filter, err := handlers.NewFilter(req.GetFilter()) if err != nil { return nil, err } @@ -952,12 +948,12 @@ func validateGetRequest(req *pbs.GetWorkerRequest) error { return handlers.ValidateGetRequest(handlers.NoopValidatorFn, req, globals.WorkerPrefix) } -func validateListRequest(ctx context.Context, req *pbs.ListWorkersRequest) error { +func validateListRequest(req *pbs.ListWorkersRequest) error { badFields := map[string]string{} if req.GetScopeId() != scope.Global.String() { badFields["scope_id"] = "Must be 'global' when listing." } - if _, err := handlers.NewFilter(ctx, req.GetFilter()); err != nil { + if _, err := handlers.NewFilter(req.GetFilter()); err != nil { badFields["filter"] = fmt.Sprintf("This field could not be parsed. 
%v", err) } if len(badFields) > 0 { diff --git a/internal/daemon/controller/handlers/workers/worker_service_test.go b/internal/daemon/controller/handlers/workers/worker_service_test.go index be2da719fed..144dbe22333 100644 --- a/internal/daemon/controller/handlers/workers/worker_service_test.go +++ b/internal/daemon/controller/handlers/workers/worker_service_test.go @@ -87,7 +87,7 @@ func TestGet(t *testing.T) { } rw := db.New(conn) kms := kms.TestKms(t, conn, wrap) - repo, err := server.NewRepository(ctx, rw, rw, kms) + repo, err := server.NewRepository(rw, rw, kms) require.NoError(t, err) repoFn := func() (*server.Repository, error) { return repo, nil @@ -310,7 +310,7 @@ func TestList(t *testing.T) { rw := db.New(conn) kms := kms.TestKms(t, conn, wrap) repoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } oldDownstramFn := downstreamWorkers t.Cleanup(func() { @@ -451,7 +451,6 @@ func TestList(t *testing.T) { } func TestDelete(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrap := db.TestWrapper(t) iamRepo := iam.TestRepo(t, conn, wrap) @@ -461,8 +460,9 @@ func TestDelete(t *testing.T) { rw := db.New(conn) kms := kms.TestKms(t, conn, wrap) repoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kms) + return server.NewRepository(rw, rw, kms) } + ctx := context.Background() workerAuthRepo, err := server.NewRepositoryStorage(ctx, rw, rw, kms) require.NoError(t, err) @@ -551,7 +551,7 @@ func TestUpdate(t *testing.T) { iamRepoFn := func() (*iam.Repository, error) { return iamRepo, nil } - repo, err := server.NewRepository(ctx, rw, rw, kms) + repo, err := server.NewRepository(rw, rw, kms) require.NoError(t, err) repoFn := func() (*server.Repository, error) { return repo, nil @@ -1063,7 +1063,7 @@ func TestUpdate_DeprecatedKMS(t *testing.T) { iamRepoFn := func() (*iam.Repository, error) { return iamRepo, nil } - repo, err := server.NewRepository(ctx, rw, rw, kms) + repo, err := server.NewRepository(rw, rw, kms) require.NoError(t, err) repoFn := func() (*server.Repository, error) { return repo, nil @@ -1161,7 +1161,7 @@ func TestUpdate_BadVersion(t *testing.T) { _, proj := iam.TestScopes(t, iamRepo) - repo, err := server.NewRepository(ctx, rw, rw, kms) + repo, err := server.NewRepository(rw, rw, kms) require.NoError(t, err, "Couldn't create new worker repo.") repoFn := func() (*server.Repository, error) { return repo, nil @@ -1186,7 +1186,6 @@ func TestUpdate_BadVersion(t *testing.T) { } func TestCreateWorkerLed(t *testing.T) { - testCtx := context.Background() conn, _ := db.TestSetup(t, "postgres") testRootWrapper := db.TestWrapper(t) iamRepo := iam.TestRepo(t, conn, testRootWrapper) @@ -1196,9 +1195,10 @@ func TestCreateWorkerLed(t *testing.T) { rw := db.New(conn) testKms := kms.TestKms(t, conn, testRootWrapper) repoFn := func() (*server.Repository, error) { - return server.NewRepository(testCtx, rw, rw, testKms) + return server.NewRepository(rw, rw, testKms) } + testCtx := context.Background() workerAuthRepo, err := server.NewRepositoryStorage(testCtx, rw, rw, testKms) require.NoError(t, err) workerAuthRepoFn := func() (*server.WorkerAuthRepositoryStorage, error) { @@ -1455,7 +1455,7 @@ func TestCreateWorkerLed(t *testing.T) { name: "create-error", service: func() Service { repoFn := func() (*server.Repository, error) { - return server.NewRepository(testCtx, rw, &db.Db{}, testKms) + return server.NewRepository(rw, &db.Db{}, testKms) } 
testSrv, err := NewService(testCtx, repoFn, iamRepoFn, workerAuthRepoFn, nil) require.NoError(t, err, "Error when getting new worker service.") @@ -1487,7 +1487,7 @@ func TestCreateWorkerLed(t *testing.T) { case cnt > 1: return nil, errors.New(testCtx, errors.Internal, "bad-repo-function", "error creating repo") default: - return server.NewRepository(testCtx, rw, rw, testKms) + return server.NewRepository(rw, rw, testKms) } } testSrv, err := NewService(testCtx, repoFn, iamRepoFn, workerAuthRepoFn, nil) @@ -1571,7 +1571,6 @@ func TestCreateWorkerLed(t *testing.T) { } func TestCreateControllerLed(t *testing.T) { - testCtx := context.Background() conn, _ := db.TestSetup(t, "postgres") testRootWrapper := db.TestWrapper(t) iamRepo := iam.TestRepo(t, conn, testRootWrapper) @@ -1581,8 +1580,9 @@ func TestCreateControllerLed(t *testing.T) { rw := db.New(conn) testKms := kms.TestKms(t, conn, testRootWrapper) repoFn := func() (*server.Repository, error) { - return server.NewRepository(testCtx, rw, rw, testKms) + return server.NewRepository(rw, rw, testKms) } + testCtx := context.Background() rootStorage, err := server.NewRepositoryStorage(testCtx, rw, rw, testKms) require.NoError(t, err) @@ -1802,7 +1802,7 @@ func TestCreateControllerLed(t *testing.T) { name: "create-error", service: func() Service { repoFn := func() (*server.Repository, error) { - return server.NewRepository(testCtx, rw, &db.Db{}, testKms) + return server.NewRepository(rw, &db.Db{}, testKms) } testSrv, err := NewService(testCtx, repoFn, iamRepoFn, authRepoFn, nil) require.NoError(t, err, "Error when getting new worker service.") @@ -1833,7 +1833,7 @@ func TestCreateControllerLed(t *testing.T) { case cnt > 1: return nil, errors.New(testCtx, errors.Internal, "bad-repo-function", "error creating repo") default: - return server.NewRepository(testCtx, rw, rw, testKms) + return server.NewRepository(rw, rw, testKms) } } testSrv, err := NewService(testCtx, repoFn, iamRepoFn, authRepoFn, nil) @@ -1931,7 +1931,7 @@ func TestService_AddWorkerTags(t *testing.T) { rw := db.New(conn) testKms := kms.TestKms(t, conn, wrapper) repoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, testKms) + return server.NewRepository(rw, rw, testKms) } workerAuthRepo, err := server.NewRepositoryStorage(ctx, rw, rw, testKms) require.NoError(err) @@ -2091,7 +2091,7 @@ func TestService_SetWorkerTags(t *testing.T) { rw := db.New(conn) testKms := kms.TestKms(t, conn, wrapper) repoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, testKms) + return server.NewRepository(rw, rw, testKms) } workerAuthRepo, err := server.NewRepositoryStorage(ctx, rw, rw, testKms) require.NoError(err) @@ -2254,7 +2254,7 @@ func TestService_RemoveWorkerTags(t *testing.T) { rw := db.New(conn) testKms := kms.TestKms(t, conn, wrapper) repoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, testKms) + return server.NewRepository(rw, rw, testKms) } workerAuthRepo, err := server.NewRepositoryStorage(ctx, rw, rw, testKms) require.NoError(err) @@ -2447,7 +2447,7 @@ func TestReadCertificateAuthority(t *testing.T) { } repoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kmsCache) + return server.NewRepository(rw, rw, kmsCache) } workerAuthRepo, err := server.NewRepositoryStorage(ctx, rw, rw, kmsCache) @@ -2523,7 +2523,7 @@ func TestReinitializeCertificateAuthority(t *testing.T) { } repoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, 
rw, rw, kmsCache) + return server.NewRepository(rw, rw, kmsCache) } workerAuthRepo, err := server.NewRepositoryStorage(ctx, rw, rw, kmsCache) diff --git a/internal/daemon/controller/interceptor_test.go b/internal/daemon/controller/interceptor_test.go index 0b5ba5d64c4..8329a9c7914 100644 --- a/internal/daemon/controller/interceptor_test.go +++ b/internal/daemon/controller/interceptor_test.go @@ -37,7 +37,6 @@ import ( ) func Test_unaryCtxInterceptor(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -48,10 +47,10 @@ func Test_unaryCtxInterceptor(t *testing.T) { return iamRepo, nil } atRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(ctx, rw, rw, kmsCache) + return authtoken.NewRepository(rw, rw, kmsCache) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(ctx, rw, rw, kmsCache) + return server.NewRepository(rw, rw, kmsCache) } validGatewayTicket := "valid-ticket" @@ -355,7 +354,6 @@ func Test_unaryCtxInterceptor(t *testing.T) { func Test_streamCtxInterceptor(t *testing.T) { t.Parallel() - factoryCtx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -366,17 +364,17 @@ func Test_streamCtxInterceptor(t *testing.T) { return iamRepo, nil } atRepoFn := func() (*authtoken.Repository, error) { - return authtoken.NewRepository(context.Background(), rw, rw, kmsCache) + return authtoken.NewRepository(rw, rw, kmsCache) } serversRepoFn := func() (*server.Repository, error) { - return server.NewRepository(factoryCtx, rw, rw, kmsCache) + return server.NewRepository(rw, rw, kmsCache) } validGatewayTicket := "valid-ticket" o, _ := iam.TestScopes(t, iamRepo) at := authtoken.TestAuthToken(t, conn, kmsCache, o.GetPublicId()) - encToken, err := authtoken.EncryptToken(factoryCtx, kmsCache, o.GetPublicId(), at.GetPublicId(), at.GetToken()) + encToken, err := authtoken.EncryptToken(context.Background(), kmsCache, o.GetPublicId(), at.GetPublicId(), at.GetToken()) require.NoError(t, err) tokValue := at.GetPublicId() + "_" + encToken @@ -395,7 +393,7 @@ func Test_streamCtxInterceptor(t *testing.T) { marshalledRequestInfo, err := proto.Marshal(&requestInfo) require.NoError(t, err) md := metadata.Pairs(requestInfoMdKey, base58.FastBase58Encoding(marshalledRequestInfo)) - mdCtx := metadata.NewIncomingContext(factoryCtx, md) + mdCtx := metadata.NewIncomingContext(context.Background(), md) md, ok := metadata.FromIncomingContext(mdCtx) require.True(t, ok) @@ -404,6 +402,8 @@ func Test_streamCtxInterceptor(t *testing.T) { return mdCtx } + factoryCtx := context.Background() + c := event.TestEventerConfig(t, "Test_streamCtxInterceptor", event.TestWithAuditSink(t), event.TestWithObservationSink(t)) testLock := &sync.Mutex{} testLogger := hclog.New(&hclog.LoggerOptions{ diff --git a/internal/daemon/controller/testing.go b/internal/daemon/controller/testing.go index 8a635c6fb4f..0d7420839ed 100644 --- a/internal/daemon/controller/testing.go +++ b/internal/daemon/controller/testing.go @@ -630,7 +630,7 @@ func TestControllerConfig(t testing.TB, ctx context.Context, tc *TestController, opts.Config.Controller = new(config.Controller) } if opts.Config.Controller.Name == "" { - require.NoError(t, opts.Config.Controller.InitNameIfEmpty(ctxTest)) + require.NoError(t, opts.Config.Controller.InitNameIfEmpty()) } opts.Config.Controller.Scheduler.JobRunIntervalDuration = opts.SchedulerRunJobInterval @@ -647,7 +647,7 @@ func 
TestControllerConfig(t testing.TB, ctx context.Context, tc *TestController, t.Fatal(err) } serverName = fmt.Sprintf("%s/controller", serverName) - if err := tc.b.SetupEventing(ctxTest, tc.b.Logger, tc.b.StderrLock, serverName, base.WithEventerConfig(opts.Config.Eventing)); err != nil { + if err := tc.b.SetupEventing(tc.b.Logger, tc.b.StderrLock, serverName, base.WithEventerConfig(opts.Config.Eventing)); err != nil { t.Fatal(err) } diff --git a/internal/daemon/worker/testing.go b/internal/daemon/worker/testing.go index 17e2cc51e58..76e854e40be 100644 --- a/internal/daemon/worker/testing.go +++ b/internal/daemon/worker/testing.go @@ -324,7 +324,7 @@ func NewTestWorker(t testing.TB, opts *TestWorkerOpts) *TestWorker { t.Fatal(err) } serverName = fmt.Sprintf("%s/worker", serverName) - if err := tw.b.SetupEventing(tw.b.Context, tw.b.Logger, tw.b.StderrLock, serverName, base.WithEventerConfig(opts.Config.Eventing)); err != nil { + if err := tw.b.SetupEventing(tw.b.Logger, tw.b.StderrLock, serverName, base.WithEventerConfig(opts.Config.Eventing)); err != nil { t.Fatal(err) } @@ -412,7 +412,7 @@ func (tw *TestWorker) AddClusterWorkerMember(t testing.TB, opts *TestWorkerOpts) } if nextOpts.Name == "" { var err error - nextOpts.Name, err = db.NewPublicId(context.Background(), "w") + nextOpts.Name, err = db.NewPublicId("w") if err != nil { t.Fatal(err) } diff --git a/internal/db/id.go b/internal/db/id.go index 050c777a426..4d94976acfc 100644 --- a/internal/db/id.go +++ b/internal/db/id.go @@ -5,7 +5,6 @@ package db import ( "bytes" - "context" "fmt" "strings" @@ -14,19 +13,19 @@ import ( "golang.org/x/crypto/blake2b" ) -func NewPrivateId(ctx context.Context, prefix string, opt ...Option) (string, error) { - return newId(ctx, prefix, opt...) +func NewPrivateId(prefix string, opt ...Option) (string, error) { + return newId(prefix, opt...) } // NewPublicId creates a new public id with the prefix -func NewPublicId(ctx context.Context, prefix string, opt ...Option) (string, error) { - return newId(ctx, prefix, opt...) +func NewPublicId(prefix string, opt ...Option) (string, error) { + return newId(prefix, opt...) 
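Annotation: NewPrivateId and NewPublicId above now delegate to newId without a context; as the remainder of the hunk shows, newId validates the prefix and appends a random base62 suffix (or a PRNG-derived one) joined by an underscore. A rough stdlib-only approximation of that behavior; the real implementation uses HashiCorp's base62 helpers and an optional WithPrngValues option.

package main

import (
    "crypto/rand"
    "fmt"
    "math/big"
    "strings"
)

const base62Chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

// newPublicId approximates db.NewPublicId: a required prefix plus a
// 10-character random base62 suffix, joined by an underscore.
func newPublicId(prefix string) (string, error) {
    if prefix == "" {
        return "", fmt.Errorf("db.newId: missing prefix")
    }
    var sb strings.Builder
    for i := 0; i < 10; i++ {
        n, err := rand.Int(rand.Reader, big.NewInt(int64(len(base62Chars))))
        if err != nil {
            return "", fmt.Errorf("db.newId: unable to generate id: %w", err)
        }
        sb.WriteByte(base62Chars[n.Int64()])
    }
    return fmt.Sprintf("%s_%s", prefix, sb.String()), nil
}

func main() {
    id, err := newPublicId("hst")
    fmt.Println(id, err)
}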
} -func newId(ctx context.Context, prefix string, opt ...Option) (string, error) { +func newId(prefix string, opt ...Option) (string, error) { const op = "db.newId" if prefix == "" { - return "", errors.New(ctx, errors.InvalidParameter, op, "missing prefix") + return "", errors.NewDeprecated(errors.InvalidParameter, op, "missing prefix") } var publicId string var err error @@ -39,7 +38,7 @@ func newId(ctx context.Context, prefix string, opt ...Option) (string, error) { publicId, err = base62.Random(10) } if err != nil { - return "", errors.Wrap(ctx, err, op, errors.WithMsg("unable to generate id"), errors.WithCode(errors.Io)) + return "", errors.WrapDeprecated(err, op, errors.WithMsg("unable to generate id"), errors.WithCode(errors.Io)) } return fmt.Sprintf("%s_%s", prefix, publicId), nil } diff --git a/internal/db/id_test.go b/internal/db/id_test.go index 487a8c4c1c0..58d89d20d0f 100644 --- a/internal/db/id_test.go +++ b/internal/db/id_test.go @@ -4,7 +4,6 @@ package db import ( - "context" "strings" "testing" @@ -41,7 +40,7 @@ func TestNewPublicId(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := NewPublicId(context.Background(), tt.args.prefix) + got, err := NewPublicId(tt.args.prefix) if (err != nil) != tt.wantErr { t.Errorf("NewPublicId() error = %v, wantErr %v", err, tt.wantErr) return @@ -86,7 +85,7 @@ func TestNewPrivateId(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewPrivateId(context.Background(), tt.args.prefix) + got, err := NewPrivateId(tt.args.prefix) if tt.wantErr { assert.Error(err) return @@ -138,7 +137,7 @@ func TestPseudoRandomId(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewPublicId(context.Background(), "id", WithPrngValues(tt.args.prngValues)) + got, err := NewPublicId("id", WithPrngValues(tt.args.prngValues)) require.NoError(err) if tt.sameAsPrev { assert.Equal(prevTestValue, got) diff --git a/internal/db/read_writer_ext_test.go b/internal/db/read_writer_ext_test.go index 5b5e53b58d5..15a61d836b6 100644 --- a/internal/db/read_writer_ext_test.go +++ b/internal/db/read_writer_ext_test.go @@ -26,7 +26,7 @@ func TestDb_Create_OnConflict(t *testing.T) { createInitialUser := func() *db_test.TestUser { // create initial user for on conflict tests - id, err := db.NewPublicId(ctx, "test-user") + id, err := db.NewPublicId("test-user") require.NoError(t, err) initialUser, err := db_test.NewTestUser() require.NoError(t, err) @@ -163,7 +163,7 @@ func TestDb_Create_OnConflict(t *testing.T) { initialUser := createInitialUser() conflictUser, err := db_test.NewTestUser() require.NoError(err) - userNameId, err := db.NewPublicId(ctx, "test-user-name") + userNameId, err := db.NewPublicId("test-user-name") require.NoError(err) conflictUser.PublicId = initialUser.PublicId conflictUser.Name = userNameId @@ -207,7 +207,7 @@ func TestDb_Create_OnConflict(t *testing.T) { initialUser := createInitialUser() conflictUser, err := db_test.NewTestUser() require.NoError(err) - userNameId, err := db.NewPublicId(ctx, "test-user-name") + userNameId, err := db.NewPublicId("test-user-name") require.NoError(err) conflictUser.PublicId = initialUser.PublicId conflictUser.Name = userNameId diff --git a/internal/db/read_writer_test.go b/internal/db/read_writer_test.go index 5a36cb5f97b..d4d17f83271 100644 --- a/internal/db/read_writer_test.go +++ 
b/internal/db/read_writer_test.go
@@ -59,7 +59,7 @@ func TestDb_Update(t *testing.T) {
     conn, _ := TestSetup(t, "postgres")
     TestCreateTables(t, conn)
     now := &timestamp.Timestamp{Timestamp: timestamppb.Now()}
-    publicId, err := NewPublicId(testCtx, "testuser")
+    publicId, err := NewPublicId("testuser")
     require.NoError(t, err)
     id := testId(t)
     wrapper := TestDBWrapper(t, conn, "oplog")
diff --git a/internal/db/schema/migrations/oss/postgres/73/01_census_logging.up.sql b/internal/db/schema/migrations/oss/postgres/73/01_census_logging.up.sql
deleted file mode 100644
index 1c7aad40b5d..00000000000
--- a/internal/db/schema/migrations/oss/postgres/73/01_census_logging.up.sql
+++ /dev/null
@@ -1,20 +0,0 @@
--- Copyright (c) HashiCorp, Inc.
--- SPDX-License-Identifier: MPL-2.0
-
-begin;
-
-  create table census_last_logged (
-    last_logged_at wt_timestamp primary key
-  );
-  comment on table census_last_logged is
-    'census_last_logged is a table with 1 row which contains the timestamp '
-    'of the last time the census status and snapshots were logged.';
-
-  -- This index ensures that there will only ever be one row in the table.
-  -- See: https://www.postgresql.org/docs/current/indexes-expressional.html
-  create unique index census_last_logged_one_row
-    on census_last_logged((last_logged_at is not null));
-
-  insert into census_last_logged(last_logged_at) values('-infinity');
-
-commit;
diff --git a/internal/db/schema/migrations/oss/postgres/74/01_hcp_update_sessions_function.up.sql b/internal/db/schema/migrations/oss/postgres/74/01_hcp_update_sessions_function.up.sql
deleted file mode 100644
index c9d8c29c843..00000000000
--- a/internal/db/schema/migrations/oss/postgres/74/01_hcp_update_sessions_function.up.sql
+++ /dev/null
@@ -1,75 +0,0 @@
--- Copyright (c) HashiCorp, Inc.
--- SPDX-License-Identifier: MPL-2.0 - -begin; - --- replaces function from 70/01_hcp_billing_daily.up.sql - drop function update_sessions_pending_daily_snapshot; - - create function update_sessions_pending_daily_snapshot() - returns setof sessions_pending_daily_snapshot - as $$ - begin - - -- already ran for today - if (date_trunc('day', now()) - '1 day'::interval) = (select max(snapshot_date) from sessions_pending_daily_snapshot) - then return; - end if; - - -- never run before and there are only sessions starting from today - if (select count(*) from sessions_pending_daily_snapshot) = 0 - and date_trunc('day', now()) = (select min(session_pending_time) from wh_session_accumulating_fact) - then return query - insert into sessions_pending_daily_snapshot - (snapshot_date, sessions_pending_count) - values - (date_trunc('day', now()) - '1 day'::interval, 0) - returning *; - return; - end if; - - return query - with - daily_counts (day, sessions_pending_count) as ( - select date_trunc('day', session_pending_time), count(*) - from wh_session_accumulating_fact - where session_pending_time < date_trunc('day', now()) -- before midnight today - and session_pending_time >= coalesce((select max(snapshot_date) from sessions_pending_daily_snapshot), '-infinity') - group by date_trunc('day', session_pending_time) - ), - daily_range (day) as ( - select bucket - from generate_series( - coalesce(date_trunc('day', (select max(snapshot_date) from sessions_pending_daily_snapshot) + '1 day'::interval), - date_trunc('day', (select min(session_pending_time) from wh_session_accumulating_fact)), - date_trunc('day', now()) - '1 day'::interval), - now() - '1 day'::interval, - '1 day'::interval - ) as bucket - ), - missing (day, sessions_pending_count) as ( - select daily_range.day::timestamp with time zone, - coalesce(daily_counts.sessions_pending_count, 0) - from daily_range - left join daily_counts on daily_range.day = daily_counts.day - ), - final (day, sessions_pending_count) as ( - insert into sessions_pending_daily_snapshot - (snapshot_date, sessions_pending_count) - select day::date, sessions_pending_count - from missing - returning * - ) - select day, sessions_pending_count - from final - order by day desc; - end; - $$ language plpgsql - set timezone to 'utc'; - comment on function update_sessions_pending_daily_snapshot is - 'update_sessions_pending_daily_snapshot is a function that updates the sessions_pending_daily_snapshot table by ' - 'querying the data warehouse and inserting the session pending counts for any days since the max snapshot_date ' - 'and yesterday. 
' - 'update_sessions_pending_daily_snapshot returns the rows inserted or null if no rows are inserted.'; - -commit; \ No newline at end of file diff --git a/internal/db/schema/migrations/oss/postgres_2_10_test.go b/internal/db/schema/migrations/oss/postgres_2_10_test.go index a382dcb2e28..70d4f931800 100644 --- a/internal/db/schema/migrations/oss/postgres_2_10_test.go +++ b/internal/db/schema/migrations/oss/postgres_2_10_test.go @@ -53,9 +53,9 @@ func Test_AuthMethodSubtypes(t *testing.T) { assert.Equal(updatedOidc.Name, oidcParent.Name) // test password subtype insert - pw, err := password.NewAuthMethod(ctx, org.PublicId, password.WithName("eve's favorite")) + pw, err := password.NewAuthMethod(org.PublicId, password.WithName("eve's favorite")) require.NoError(err) - passRepo, err := password.NewRepository(ctx, rw, rw, kmsCache) + passRepo, err := password.NewRepository(rw, rw, kmsCache) require.NoError(err) pw, err = passRepo.CreateAuthMethod(ctx, pw) require.NoError(err) diff --git a/internal/db/schema/migrations/oss/postgres_30_01_test.go b/internal/db/schema/migrations/oss/postgres_30_01_test.go index 4d7240bb470..7744da0a134 100644 --- a/internal/db/schema/migrations/oss/postgres_30_01_test.go +++ b/internal/db/schema/migrations/oss/postgres_30_01_test.go @@ -426,21 +426,21 @@ func loadCurrentDekVersions(t *testing.T, rw *db.Db) []dekVersion { func testId(t testing.TB, prefix string) string { t.Helper() - id, err := db.NewPublicId(context.Background(), prefix) + id, err := db.NewPublicId(prefix) require.NoError(t, err) return id } func testScope(t *testing.T, rw *db.Db) *iam.Scope { t.Helper() - ctx := context.Background() require := require.New(t) + testCtx := context.Background() - s, err := iam.NewOrg(ctx) + s, err := iam.NewOrg() require.NoError(err) s.PublicId = testId(t, "o") - require.NoError(rw.Create(ctx, &s)) + require.NoError(rw.Create(testCtx, &s)) return s } diff --git a/internal/db/schema/migrations/oss/postgres_40_01_test.go b/internal/db/schema/migrations/oss/postgres_40_01_test.go index 721a95f262c..938bb462d3e 100644 --- a/internal/db/schema/migrations/oss/postgres_40_01_test.go +++ b/internal/db/schema/migrations/oss/postgres_40_01_test.go @@ -75,7 +75,7 @@ func TestMigrations_Credential_Purpose_Refactor(t *testing.T) { wrapper := db.TestWrapper(t) kmsCache := kms.TestKms(t, conn, wrapper) - authRepo, err := authtoken.NewRepository(ctx, rw, rw, kmsCache) + authRepo, err := authtoken.NewRepository(rw, rw, kmsCache) require.NoError(t, err) uId := "u_1234567890" @@ -246,13 +246,13 @@ values cred1 := credsStatic[0] cred2 := credsStatic[1] - appCredLib, err := target.NewCredentialLibrary(ctx, targetId, lib1.GetPublicId(), "application") + appCredLib, err := target.NewCredentialLibrary(targetId, lib1.GetPublicId(), "application") require.NoError(t, err) - egressCredLib, err := target.NewCredentialLibrary(ctx, targetId, lib2.GetPublicId(), "egress") + egressCredLib, err := target.NewCredentialLibrary(targetId, lib2.GetPublicId(), "egress") require.NoError(t, err) - appCred, err := target.NewStaticCredential(ctx, targetId, cred1.PublicId, "application") + appCred, err := target.NewStaticCredential(targetId, cred1.PublicId, "application") require.NoError(t, err) - egressCred, err := target.NewStaticCredential(ctx, targetId, cred2.PublicId, "egress") + egressCred, err := target.NewStaticCredential(targetId, cred2.PublicId, "egress") require.NoError(t, err) err = rw.CreateItems(ctx, []any{appCredLib, egressCredLib}) diff --git 
a/internal/db/schema/migrations/oss/postgres_57_01_test.go b/internal/db/schema/migrations/oss/postgres_57_01_test.go index 8992058ff64..3aa920d908b 100644 --- a/internal/db/schema/migrations/oss/postgres_57_01_test.go +++ b/internal/db/schema/migrations/oss/postgres_57_01_test.go @@ -76,7 +76,7 @@ func TestMigrations_DeleteOrphanedAccounts(t *testing.T) { // Create the user we will associate accounts with iamRepo := iam.TestRepo(t, conn, wrapper) - usr, err := iam.NewUser(ctx, scope.Global.String()) + usr, err := iam.NewUser(scope.Global.String()) require.NoError(t, err) usr.PublicId = "u_1234567890" num, err := rw.Exec(ctx, ` @@ -90,7 +90,7 @@ values // Create the accounts we will delete and assert their behavior - pwAm1, err := password.NewAuthMethod(ctx, scope.Global.String()) + pwAm1, err := password.NewAuthMethod(scope.Global.String()) require.NoError(t, err) pwAm1.PublicId = "ampw_1234567890" _, err = rw.DoTx(ctx, 0, db.ExpBackoff{}, func(r db.Reader, w db.Writer) error { @@ -112,7 +112,7 @@ values return err }) require.NoError(t, err) - pwAcct1, err := password.NewAccount(ctx, pwAm1.GetPublicId(), password.WithLoginName("account1"), password.WithPassword("password")) + pwAcct1, err := password.NewAccount(pwAm1.GetPublicId(), password.WithLoginName("account1"), password.WithPassword("password")) require.NoError(t, err) pwAcct1.PublicId = "acctpw_1234567890" num, err = rw.Exec(ctx, ` @@ -124,7 +124,7 @@ values require.NoError(t, err) assert.Equal(t, 1, num) - pwAm2, err := password.NewAuthMethod(ctx, scope.Global.String()) + pwAm2, err := password.NewAuthMethod(scope.Global.String()) require.NoError(t, err) pwAm2.PublicId = "ampw_0123456789" _, err = rw.DoTx(ctx, 0, db.ExpBackoff{}, func(r db.Reader, w db.Writer) error { @@ -145,7 +145,7 @@ values `, []any{pwAm2.PublicId, "arg2conf_0123456789", pwAm2.ScopeId}) return err }) - pwAcct2, err := password.NewAccount(ctx, pwAm2.GetPublicId(), password.WithLoginName("account2"), password.WithPassword("password")) + pwAcct2, err := password.NewAccount(pwAm2.GetPublicId(), password.WithLoginName("account2"), password.WithPassword("password")) require.NoError(t, err) pwAcct2.PublicId = "acctpw_0123456789" num, err = rw.Exec(ctx, ` diff --git a/internal/db/sqltest/tests/census/last_logged.sql b/internal/db/sqltest/tests/census/last_logged.sql deleted file mode 100644 index f23c23510f9..00000000000 --- a/internal/db/sqltest/tests/census/last_logged.sql +++ /dev/null @@ -1,31 +0,0 @@ --- Copyright (c) HashiCorp, Inc. 
--- SPDX-License-Identifier: MPL-2.0 - -begin; - - select plan(6); - - select has_table('census_last_logged'); - select is(count(*), 1::bigint, 'census_last_logged should have only 1 row') from census_last_logged; - select ok(not isfinite(last_logged_at)) from census_last_logged; - - prepare insert_row as - insert into census_last_logged - (last_logged_at) - values - (now()); - - select throws_ok('insert_row', '23505', - 'duplicate key value violates unique constraint "census_last_logged_one_row"', - 'insert into census_last_logged should fail'); - - prepare update_census_last_logged as - update census_last_logged - set last_logged_at = now(); - - select lives_ok('update_census_last_logged'); - select ok(isfinite(last_logged_at)) from census_last_logged; - - select * from finish(); - -rollback; diff --git a/internal/db/sqltest/tests/hcp/billing/update_sessions_pending_daily_snapshot.sql b/internal/db/sqltest/tests/hcp/billing/update_sessions_pending_daily_snapshot.sql index 72a943c8a79..654e2ddb228 100644 --- a/internal/db/sqltest/tests/hcp/billing/update_sessions_pending_daily_snapshot.sql +++ b/internal/db/sqltest/tests/hcp/billing/update_sessions_pending_daily_snapshot.sql @@ -2,7 +2,7 @@ -- SPDX-License-Identifier: MPL-2.0 begin; - select plan(38); + select plan(27); select has_function('update_sessions_pending_daily_snapshot'); select volatility_is('update_sessions_pending_daily_snapshot', 'volatile'); @@ -103,7 +103,7 @@ begin; insert into test_table_data (snapshot_date, sessions_pending_count) select yesterday()::date, 1; select results_eq('call_update_sessions_pending_daily_snapshot', 'select_test_table_data'); - -- upgrade install, sessions are for today, yesterday, and 2 days ago + -- upgrade install, sessions are for today, yesterday, and 3 days ago select reset_data(); select test_add_session(yesterday() - '1 day'::interval); select test_add_session(yesterday()); @@ -112,14 +112,5 @@ begin; insert into test_table_data (snapshot_date, sessions_pending_count) select yesterday()::date, 1; select results_eq('call_update_sessions_pending_daily_snapshot', 'select_test_table_data'); - -- upgrade install, add session for 2 days ago, do not add a session for yesterday - -- add row to real sessions_pending table, as update_sessions_pending_daily_snapshot fn needs to read from it if missing data from yesterday - select reset_data(); - select test_add_session(yesterday() - '1 day'::interval); - insert into sessions_pending_daily_snapshot (snapshot_date, sessions_pending_count) select yesterday()::date - '1 day'::interval, 1; - insert into test_table_data (snapshot_date, sessions_pending_count) select yesterday()::date, 0; - select results_eq('call_update_sessions_pending_daily_snapshot', 'select_test_table_data'); - - select reset_data(); select * from finish(); rollback; diff --git a/internal/errors/error.go b/internal/errors/error.go index 67889085cc6..bf56f502431 100644 --- a/internal/errors/error.go +++ b/internal/errors/error.go @@ -158,6 +158,30 @@ func Wrap(ctx context.Context, e error, op Op, opt ...Option) error { return E(ctx, opt...) } +// EDeprecated is the legacy version of E which does not +// create an event. Please refrain from using this. +// When all calls are moved from EDeprecated to +// E, please update ICU-1883 +func EDeprecated(opt ...Option) error { + return E(context.TODO(), opt...) +} + +// NewDeprecated is the legacy version of New which does not +// create an event. Please refrain from using this. 
+// When all calls are moved from NewDeprecated to +// New, please update ICU-1883 +func NewDeprecated(c Code, op Op, msg string, opt ...Option) error { + return New(context.TODO(), c, op, msg, opt...) +} + +// WrapDeprecated is the legacy version of New which does not +// create an event. Please refrain from using this. +// When all calls are moved from WrapDeprecated to +// New, please update ICU-1884 +func WrapDeprecated(e error, op Op, opt ...Option) error { + return Wrap(context.TODO(), e, op, opt...) +} + // Convert will convert the error to a Boundary *Err (returning it as an error) // and attempt to add a helpful error msg as well. If that's not possible, it // will return nil diff --git a/internal/errors/error_test.go b/internal/errors/error_test.go index 76e88d3fe57..ee35e7c94a5 100644 --- a/internal/errors/error_test.go +++ b/internal/errors/error_test.go @@ -18,7 +18,6 @@ import ( func Test_ErrorE(t *testing.T) { t.Parallel() - ctx := context.Background() errRecordNotFound := errors.E(context.TODO(), errors.WithoutEvent(), errors.WithCode(errors.RecordNotFound)) tests := []struct { name string @@ -81,7 +80,7 @@ func Test_ErrorE(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - err := errors.E(ctx, tt.opt...) + err := errors.EDeprecated(tt.opt...) require.Error(err) assert.Equal(tt.want, err) @@ -103,7 +102,6 @@ func Test_ErrorE(t *testing.T) { func Test_NewError(t *testing.T) { t.Parallel() - ctx := context.Background() tests := []struct { name string code errors.Code @@ -118,11 +116,11 @@ func Test_NewError(t *testing.T) { op: "alice.Bob", msg: "test msg", opt: []errors.Option{ - errors.WithWrap(errors.E(ctx, errors.WithoutEvent(), errors.WithCode(errors.RecordNotFound))), + errors.WithWrap(errors.E(context.TODO(), errors.WithoutEvent(), errors.WithCode(errors.RecordNotFound))), }, want: &errors.Err{ Op: "alice.Bob", - Wrapped: errors.E(ctx, errors.WithoutEvent(), errors.WithCode(errors.RecordNotFound)), + Wrapped: errors.E(context.TODO(), errors.WithoutEvent(), errors.WithCode(errors.RecordNotFound)), Msg: "test msg", Code: errors.InvalidParameter, }, @@ -170,7 +168,11 @@ func Test_NewError(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - err := errors.New(ctx, tt.code, tt.op, tt.msg, tt.opt...) + err := errors.NewDeprecated(tt.code, tt.op, tt.msg, tt.opt...) + require.Error(err) + assert.Equal(tt.want, err) + + err = errors.New(context.TODO(), tt.code, tt.op, tt.msg, tt.opt...) 
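Annotation: the EDeprecated, NewDeprecated, and WrapDeprecated helpers added above are thin shims that keep the old context-free signatures alive by delegating to the context-aware functions with context.TODO(). The same bridging pattern in a generic, self-contained form; the constructor names here are placeholders, not Boundary's.

package main

import (
    "context"
    "fmt"
)

// newError is a stand-in for a context-aware constructor such as errors.New(ctx, ...).
func newError(ctx context.Context, op, msg string) error {
    _ = ctx // the real version would also emit an event tied to the request context
    return fmt.Errorf("%s: %s", op, msg)
}

// newErrorDeprecated preserves the legacy signature for callers that have no
// context, mirroring NewDeprecated, by passing context.TODO().
// Deprecated: prefer newError so the error is tied to a real context.
func newErrorDeprecated(op, msg string) error {
    return newError(context.TODO(), op, msg)
}

func main() {
    fmt.Println(newErrorDeprecated("db.newId", "missing prefix"))
}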
require.Error(err) assert.Equal(tt.want, err) }) @@ -179,8 +181,7 @@ func Test_NewError(t *testing.T) { func Test_WrapError(t *testing.T) { t.Parallel() - ctx := context.Background() - testErr := errors.E(ctx, errors.WithoutEvent(), errors.WithCode(errors.InvalidParameter), errors.WithOp("alice.Bob"), errors.WithMsg("test msg")) + testErr := errors.E(context.TODO(), errors.WithoutEvent(), errors.WithCode(errors.InvalidParameter), errors.WithOp("alice.Bob"), errors.WithMsg("test msg")) tests := []struct { name string opt []errors.Option @@ -261,7 +262,7 @@ func Test_WrapError(t *testing.T) { Message: "test msg", }, want: &errors.Err{ - Wrapped: errors.E(ctx, errors.WithoutEvent(), errors.WithCode(errors.NotSpecificIntegrity), errors.WithMsg("test msg")), + Wrapped: errors.E(context.TODO(), errors.WithoutEvent(), errors.WithCode(errors.NotSpecificIntegrity), errors.WithMsg("test msg")), Code: errors.NotSpecificIntegrity, }, }, @@ -269,7 +270,11 @@ func Test_WrapError(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - err := errors.Wrap(ctx, tt.err, tt.op, tt.opt...) + err := errors.WrapDeprecated(tt.err, tt.op, tt.opt...) + require.Error(err) + assert.Equal(tt.want, err) + + err = errors.Wrap(context.TODO(), tt.err, tt.op, tt.opt...) require.Error(err) assert.Equal(tt.want, err) }) @@ -309,7 +314,6 @@ func TestError_Info(t *testing.T) { func TestError_Error(t *testing.T) { t.Parallel() - ctx := context.Background() tests := []struct { name string err error @@ -332,22 +336,22 @@ func TestError_Error(t *testing.T) { }, { name: "unknown", - err: errors.E(ctx), + err: errors.EDeprecated(), want: "unknown, unknown: error #0", }, { name: "wrapped-no-code", - err: errors.E(context.TODO(), errors.WithoutEvent(), errors.WithWrap(errors.E(ctx, errors.WithCode(errors.InvalidParameter), errors.WithMsg("wrapped msg"))), errors.WithMsg("test msg")), + err: errors.E(context.TODO(), errors.WithoutEvent(), errors.WithWrap(errors.EDeprecated(errors.WithCode(errors.InvalidParameter), errors.WithMsg("wrapped msg"))), errors.WithMsg("test msg")), want: "test msg: wrapped msg: parameter violation: error #100", }, { name: "wrapped-different-error-codes", - err: errors.E(context.TODO(), errors.WithoutEvent(), errors.WithCode(errors.CheckConstraint), errors.WithWrap(errors.E(ctx, errors.WithCode(errors.InvalidParameter), errors.WithMsg("wrapped msg"))), errors.WithMsg("test msg")), + err: errors.E(context.TODO(), errors.WithoutEvent(), errors.WithCode(errors.CheckConstraint), errors.WithWrap(errors.EDeprecated(errors.WithCode(errors.InvalidParameter), errors.WithMsg("wrapped msg"))), errors.WithMsg("test msg")), want: "test msg: integrity violation: error #1000: wrapped msg: parameter violation: error #100", }, { name: "wrapped-same-error-codes", - err: errors.E(context.TODO(), errors.WithoutEvent(), errors.WithCode(errors.CheckConstraint), errors.WithWrap(errors.E(ctx, errors.WithCode(errors.CheckConstraint), errors.WithMsg("wrapped msg"))), errors.WithMsg("test msg")), + err: errors.E(context.TODO(), errors.WithoutEvent(), errors.WithCode(errors.CheckConstraint), errors.WithWrap(errors.EDeprecated(errors.WithCode(errors.CheckConstraint), errors.WithMsg("wrapped msg"))), errors.WithMsg("test msg")), want: "test msg: wrapped msg: integrity violation: error #1000", }, } @@ -368,9 +372,8 @@ func TestError_Error(t *testing.T) { func TestError_Unwrap(t *testing.T) { t.Parallel() - ctx := context.Background() - testErr := errors.E(ctx, 
errors.WithMsg("test error")) - errInvalidParameter := errors.E(ctx, errors.WithCode(errors.InvalidParameter), errors.WithMsg("test error")) + testErr := errors.EDeprecated(errors.WithMsg("test error")) + errInvalidParameter := errors.EDeprecated(errors.WithCode(errors.InvalidParameter), errors.WithMsg("test error")) tests := []struct { name string @@ -380,7 +383,7 @@ func TestError_Unwrap(t *testing.T) { }{ { name: "ErrInvalidParameter", - err: errors.E(ctx, errors.WithWrap(errInvalidParameter)), + err: errors.EDeprecated(errors.WithWrap(errInvalidParameter)), want: errInvalidParameter, wantIsErr: errInvalidParameter, }, @@ -411,8 +414,7 @@ func TestError_Unwrap(t *testing.T) { func TestConvertError(t *testing.T) { t.Parallel() - ctx := context.Background() - testErr := errors.E(ctx, errors.WithCode(errors.InvalidParameter), errors.WithOp("alice.Bob"), errors.WithMsg("test msg")) + testErr := errors.EDeprecated(errors.WithCode(errors.InvalidParameter), errors.WithOp("alice.Bob"), errors.WithMsg("test msg")) const ( createTable = ` create table if not exists test_table ( @@ -426,6 +428,7 @@ func TestConvertError(t *testing.T) { insert = `insert into test_table(name, description, five) values (?, ?, ?)` missingTable = `select * from not_a_defined_table` ) + ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) @@ -452,7 +455,7 @@ func TestConvertError(t *testing.T) { e: &pgconn.PgError{ Code: "23001", }, - wantErr: errors.E(ctx, errors.WithCode(errors.NotSpecificIntegrity)), + wantErr: errors.EDeprecated(errors.WithCode(errors.NotSpecificIntegrity)), }, { name: "convert-domain-error", diff --git a/internal/gen/controller.swagger.json b/internal/gen/controller.swagger.json index 082568edbfb..632772f5a5c 100644 --- a/internal/gen/controller.swagger.json +++ b/internal/gen/controller.swagger.json @@ -5248,15 +5248,7 @@ "properties": { "id": { "type": "string", - "description": "Output only. The ID, if set.\nDeprecated: use \"ids\" instead.", - "readOnly": true - }, - "ids": { - "type": "array", - "items": { - "type": "string" - }, - "description": "Output only. The IDs, if set.", + "description": "Output only. The ID, if set.", "readOnly": true }, "type": { @@ -5686,110 +5678,6 @@ }, "description": "ConnectionRecording contains the recording of a single Connection within a Session." }, - "controller.api.resources.sessionrecordings.v1.Credential": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "The ID of the Credential." - }, - "credential_store": { - "$ref": "#/definitions/controller.api.resources.sessionrecordings.v1.CredentialStore", - "description": "The Credential Store of which this Credential is a part." - }, - "name": { - "type": "string", - "description": "The name of the credential." - }, - "description": { - "type": "string", - "description": "Optional user-set description." - }, - "purposes": { - "type": "array", - "items": { - "type": "string" - }, - "description": "The purposes for which this Credential was attached to the sesssion." - }, - "type": { - "type": "string", - "description": "The Credential type." - }, - "attributes": { - "type": "object", - "description": "The attributes that are applicable for the specific Credential type." 
- } - }, - "title": "Credential contains fields related to an Credential resource" - }, - "controller.api.resources.sessionrecordings.v1.CredentialLibrary": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "The ID of the Credential Library." - }, - "credential_store": { - "$ref": "#/definitions/controller.api.resources.sessionrecordings.v1.CredentialStore", - "description": "The credential store of which this library is a part." - }, - "name": { - "type": "string", - "description": "Optional name of this Credential Library." - }, - "description": { - "type": "string", - "description": "Optional user-set description of this Credential Library." - }, - "purposes": { - "type": "array", - "items": { - "type": "string" - }, - "description": "The purposes for which this CredentialLibrary was attached to the sesssion." - }, - "type": { - "type": "string", - "description": "The Credential Library type." - }, - "attributes": { - "type": "object", - "description": "The attributes that are applicable for the specific Credential Library type." - } - }, - "title": "CredentialLibrary contains all fields related to an Credential Library resource" - }, - "controller.api.resources.sessionrecordings.v1.CredentialStore": { - "type": "object", - "properties": { - "id": { - "type": "string", - "description": "The ID of the Credential Store." - }, - "scope_id": { - "type": "string", - "description": "The ID of the Scope of which this Credential Store is a part." - }, - "name": { - "type": "string", - "description": "The name for identification purposes if set." - }, - "description": { - "type": "string", - "description": "The description for identification purposes if set." - }, - "type": { - "type": "string", - "description": "The Credential Store type." - }, - "attributes": { - "type": "object", - "description": "The attributes that are applicable for the specific Credential Store type." - } - }, - "title": "CredentialStore contains all fields related to a Credential Store resource" - }, "controller.api.resources.sessionrecordings.v1.Host": { "type": "object", "properties": { @@ -6055,22 +5943,6 @@ "host": { "$ref": "#/definitions/controller.api.resources.sessionrecordings.v1.Host", "description": "Information about the Host chosen for the session." - }, - "credentials": { - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/controller.api.resources.sessionrecordings.v1.Credential" - }, - "description": "Information about the Credentials used for this session." - }, - "credential_libraries": { - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/controller.api.resources.sessionrecordings.v1.CredentialLibrary" - }, - "description": "Information about the Credential Libraries used for this session." } }, "description": "ValuesAtTime contain information about other Boundary resources as they\nwere at a certain time through the lifetime of the Session Recording." 
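Annotation: for reference, the Credential and CredentialStore definitions removed from the generated swagger above describe a credential with an id, its parent credential store, optional name and description, the purposes it served for the session, a type, and type-specific attributes. An illustrative (hand-written, not generated) Go mirror of that shape; field and value names beyond the swagger properties are assumptions.

package main

import (
    "encoding/json"
    "fmt"
)

// credentialStore and credential loosely mirror the removed
// sessionrecordings swagger definitions.
type credentialStore struct {
    Id          string         `json:"id,omitempty"`
    ScopeId     string         `json:"scope_id,omitempty"`
    Name        string         `json:"name,omitempty"`
    Description string         `json:"description,omitempty"`
    Type        string         `json:"type,omitempty"`
    Attributes  map[string]any `json:"attributes,omitempty"`
}

type credential struct {
    Id              string           `json:"id,omitempty"`
    CredentialStore *credentialStore `json:"credential_store,omitempty"`
    Name            string           `json:"name,omitempty"`
    Description     string           `json:"description,omitempty"`
    Purposes        []string         `json:"purposes,omitempty"`
    Type            string           `json:"type,omitempty"`
    Attributes      map[string]any   `json:"attributes,omitempty"`
}

func main() {
    c := credential{
        Id:       "cred_1234567890", // hypothetical id
        Purposes: []string{"application"},
        Type:     "username_password",
    }
    out, _ := json.Marshal(c)
    fmt.Println(string(out))
}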
diff --git a/internal/host/plugin/host_set_member_test.go b/internal/host/plugin/host_set_member_test.go index c5f2981cf64..ba893b6f86e 100644 --- a/internal/host/plugin/host_set_member_test.go +++ b/internal/host/plugin/host_set_member_test.go @@ -39,7 +39,7 @@ func TestHostSetMember_InsertDelete(t *testing.T) { plg.GetPublicId(): loopback.NewWrappingPluginHostClient(&plgpb.UnimplementedHostPluginServiceServer{}), } - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) require.NoError(t, err) cats := TestCatalogs(t, conn, prj.PublicId, plg.PublicId, 2) @@ -49,25 +49,25 @@ func TestHostSetMember_InsertDelete(t *testing.T) { blueSet2 := TestSet(t, conn, kms, sched, blueCat, plgm) blueSet3 := TestSet(t, conn, kms, sched, blueCat, plgm) - hostId, err := db.NewPublicId(ctx, globals.PluginHostPrefix) + hostId, err := db.NewPublicId(globals.PluginHostPrefix) require.NoError(t, err) blueHost1 := NewHost(ctx, blueCat.PublicId, "blue1", withPluginId(plg.GetPublicId())) blueHost1.PublicId = hostId require.NoError(t, rw.Create(ctx, blueHost1)) - hostId, err = db.NewPublicId(ctx, globals.PluginHostPrefix) + hostId, err = db.NewPublicId(globals.PluginHostPrefix) require.NoError(t, err) blueHost2 := NewHost(ctx, blueCat.PublicId, "blue2", withPluginId(plg.GetPublicId())) blueHost2.PublicId = hostId require.NoError(t, rw.Create(ctx, blueHost2)) - hostId, err = db.NewPublicId(ctx, globals.PluginHostPrefix) + hostId, err = db.NewPublicId(globals.PluginHostPrefix) require.NoError(t, err) blueHost3 := NewHost(ctx, blueCat.PublicId, "blue3", withPluginId(plg.GetPublicId())) blueHost3.PublicId = hostId require.NoError(t, rw.Create(ctx, blueHost3)) - hostId, err = db.NewPublicId(ctx, globals.PluginHostPrefix) + hostId, err = db.NewPublicId(globals.PluginHostPrefix) require.NoError(t, err) blueHost4 := NewHost(ctx, blueCat.PublicId, "blue4", withPluginId(plg.GetPublicId())) blueHost4.PublicId = hostId diff --git a/internal/host/plugin/ids.go b/internal/host/plugin/ids.go index 80ceaa6096f..35f4643b2f9 100644 --- a/internal/host/plugin/ids.go +++ b/internal/host/plugin/ids.go @@ -25,7 +25,7 @@ const ( ) func newHostCatalogId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.PluginHostCatalogPrefix) + id, err := db.NewPublicId(globals.PluginHostCatalogPrefix) if err != nil { return "", errors.Wrap(ctx, err, "plugin.newHostCatalogId") } @@ -33,7 +33,7 @@ func newHostCatalogId(ctx context.Context) (string, error) { } func newHostSetId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.PluginHostSetPrefix) + id, err := db.NewPublicId(globals.PluginHostSetPrefix) if err != nil { return "", errors.Wrap(ctx, err, "plugin.newHostSetId") } @@ -48,7 +48,7 @@ func newHostId(ctx context.Context, catalogId, externalId string) (string, error if externalId == "" { return "", errors.New(ctx, errors.InvalidParameter, op, "missing external id") } - id, err := db.NewPublicId(ctx, globals.PluginHostPrefix, db.WithPrngValues([]string{catalogId, externalId})) + id, err := db.NewPublicId(globals.PluginHostPrefix, db.WithPrngValues([]string{catalogId, externalId})) if err != nil { return "", errors.Wrap(ctx, err, op) } diff --git a/internal/host/plugin/job_set_sync.go b/internal/host/plugin/job_set_sync.go index 6d7c19c29f8..c4c05a2df3f 100644 --- a/internal/host/plugin/job_set_sync.go +++ b/internal/host/plugin/job_set_sync.go @@ -121,7 +121,7 @@ func (r *SetSyncJob) NextRunIn(ctx context.Context) (time.Duration, 
error) { const op = "plugin.(SetSyncJob).NextRunIn" next, err := nextSync(ctx, r) if err != nil { - return setSyncJobRunInterval, errors.Wrap(ctx, err, op) + return setSyncJobRunInterval, errors.WrapDeprecated(err, op) } return next, nil } @@ -145,12 +145,12 @@ func nextSync(ctx context.Context, j scheduler.Job) (time.Duration, error) { query = setSyncNextRunInQuery r = job.reader default: - return 0, errors.New(ctx, errors.Unknown, op, "unknown job") + return 0, errors.NewDeprecated(errors.Unknown, op, "unknown job") } rows, err := r.Query(context.Background(), query, []any{setSyncJobRunInterval}) if err != nil { - return 0, errors.Wrap(ctx, err, op) + return 0, errors.WrapDeprecated(err, op) } defer rows.Close() @@ -166,7 +166,7 @@ func nextSync(ctx context.Context, j scheduler.Job) (time.Duration, error) { var n NextResync err = r.ScanRows(ctx, rows, &n) if err != nil { - return 0, errors.Wrap(ctx, err, op) + return 0, errors.WrapDeprecated(err, op) } switch { case n.SyncNow: diff --git a/internal/host/plugin/job_set_sync_test.go b/internal/host/plugin/job_set_sync_test.go index b8ded4634f9..6e51b2f836e 100644 --- a/internal/host/plugin/job_set_sync_test.go +++ b/internal/host/plugin/job_set_sync_test.go @@ -206,7 +206,7 @@ func TestSetSyncJob_Run(t *testing.T) { }, nil } - hostRepo, err := NewRepository(ctx, rw, rw, kmsCache, sche, plgm) + hostRepo, err := NewRepository(rw, rw, kmsCache, sche, plgm) require.NoError(err) hsa = &hostSetAgg{PublicId: set1.GetPublicId()} diff --git a/internal/host/plugin/repository.go b/internal/host/plugin/repository.go index 4fba7bbd6cb..385389c88bb 100644 --- a/internal/host/plugin/repository.go +++ b/internal/host/plugin/repository.go @@ -7,8 +7,6 @@ package plugin import ( - "context" - "github.com/hashicorp/boundary/internal/db" "github.com/hashicorp/boundary/internal/errors" "github.com/hashicorp/boundary/internal/host" @@ -36,24 +34,24 @@ type Repository struct { // only be used for one transaction and it is not safe for concurrent go // routines to access it. WithLimit option is used as a repo wide default // limit applied to all ListX methods. -func NewRepository(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, sched *scheduler.Scheduler, plgm map[string]plgpb.HostPluginServiceClient, opt ...host.Option) (*Repository, error) { +func NewRepository(r db.Reader, w db.Writer, kms *kms.Kms, sched *scheduler.Scheduler, plgm map[string]plgpb.HostPluginServiceClient, opt ...host.Option) (*Repository, error) { const op = "plugin.NewRepository" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "db.Reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "db.Reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "db.Writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "kms") case sched == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "scheduler") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "scheduler") case plgm == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "plgm") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "plgm") } opts, err := host.GetOpts(opt...) 
if err != nil { - return nil, errors.Wrap(ctx, err, op) + return nil, errors.WrapDeprecated(err, op) } if opts.WithLimit == 0 { // zero signals the boundary defaults should be used. diff --git a/internal/host/plugin/repository_host_catalog.go b/internal/host/plugin/repository_host_catalog.go index eb70b7250e0..fabe5515731 100644 --- a/internal/host/plugin/repository_host_catalog.go +++ b/internal/host/plugin/repository_host_catalog.go @@ -130,9 +130,6 @@ func (r *Repository) CreateCatalog(ctx context.Context, c *HostCatalog, _ ...Opt if err := normalizeCatalogAttributes(ctx, plgClient, plgHc); err != nil { return nil, nil, errors.Wrap(ctx, err, op) } - if c.Attributes, err = proto.Marshal(plgHc.GetAttributes()); err != nil { - return nil, nil, errors.Wrap(ctx, err, op) - } } oplogWrapper, err := r.kms.GetWrapper(ctx, c.ProjectId, kms.KeyPurposeOplog) diff --git a/internal/host/plugin/repository_host_catalog_test.go b/internal/host/plugin/repository_host_catalog_test.go index 03ffc5ce3df..7a42eb029b3 100644 --- a/internal/host/plugin/repository_host_catalog_test.go +++ b/internal/host/plugin/repository_host_catalog_test.go @@ -189,7 +189,7 @@ func TestRepository_CreateCatalog(t *testing.T) { PluginId: plg.GetPublicId(), Attributes: func() []byte { st, err := structpb.NewStruct(map[string]any{ - "k1": nil, + "k1": "foo", "nilkey": nil, normalizeToSliceKey: "normalizeme", }) @@ -205,14 +205,9 @@ func TestRepository_CreateCatalog(t *testing.T) { ProjectId: prj.GetPublicId(), PluginId: plg.GetPublicId(), Attributes: func() []byte { - b, err := proto.Marshal(&structpb.Struct{Fields: map[string]*structpb.Value{ - normalizeToSliceKey: structpb.NewListValue( - &structpb.ListValue{ - Values: []*structpb.Value{ - structpb.NewStringValue("normalizeme"), - }, - }), - }}) + st, err := structpb.NewStruct(map[string]any{"k1": "foo"}) + require.NoError(t, err) + b, err := proto.Marshal(st) require.NoError(t, err) return b }(), @@ -310,7 +305,7 @@ func TestRepository_CreateCatalog(t *testing.T) { }, }}, } - repo, err := NewRepository(ctx, rw, rw, kmsCache, sched, plgm) + repo, err := NewRepository(rw, rw, kmsCache, sched, plgm) assert.NoError(err) assert.NotNil(repo) got, _, err := repo.CreateCatalog(ctx, tt.in, tt.opts...) 
@@ -328,7 +323,6 @@ func TestRepository_CreateCatalog(t *testing.T) { assert.Equal(tt.want.Name, got.Name) assert.Equal(tt.want.Description, got.Description) assert.Equal(got.CreateTime, got.UpdateTime) - assert.Equal(tt.want.Attributes, got.Attributes) if origPluginAttrs != nil { if normalizeVal := origPluginAttrs.Fields[normalizeToSliceKey]; normalizeVal != nil { @@ -388,7 +382,7 @@ func TestRepository_CreateCatalog(t *testing.T) { }, }, } - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(err) assert.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -434,7 +428,7 @@ func TestRepository_CreateCatalog(t *testing.T) { }, }, } - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(err) assert.NotNil(repo) org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -1228,7 +1222,7 @@ func TestRepository_UpdateCatalog(t *testing.T) { } pluginError = tt.withPluginError t.Cleanup(func() { pluginError = nil }) - repo, err := NewRepository(ctx, dbRW, dbRW, dbKmsCache, sched, pluginMap) + repo, err := NewRepository(dbRW, dbRW, dbKmsCache, sched, pluginMap) require.NoError(err) require.NotNil(repo) @@ -1307,7 +1301,7 @@ func TestRepository_LookupCatalog(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(err) assert.NotNil(repo) @@ -1331,7 +1325,6 @@ func TestRepository_LookupCatalog(t *testing.T) { func TestRepository_ListCatalogs_Multiple_Scopes(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) rw := db.New(conn) @@ -1341,7 +1334,7 @@ func TestRepository_ListCatalogs_Multiple_Scopes(t *testing.T) { plgm := map[string]plgpb.HostPluginServiceClient{ plg.GetPublicId(): &loopback.WrappingPluginHostClient{Server: &loopback.TestPluginServer{}}, } - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(t, err) assert.NotNil(t, repo) @@ -1382,7 +1375,7 @@ func TestRepository_DeleteCatalog(t *testing.T) { assert.NotNil(t, badId) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(t, err) assert.NotNil(t, repo) @@ -1494,7 +1487,7 @@ func TestRepository_DeleteCatalogX(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(err) assert.NotNil(repo) @@ -1576,12 +1569,12 @@ func TestRepository_UpdateCatalog_SyncSets(t *testing.T) { err = sched.RegisterJob(ctx, j, scheduler.WithNextRunIn(setSyncJobRunInterval)) require.NoError(t, err) - repo, err := NewRepository(ctx, dbRW, dbRW, dbKmsCache, sched, dummyPluginMap) + repo, err := NewRepository(dbRW, dbRW, dbKmsCache, sched, dummyPluginMap) require.NoError(t, err) require.NotNil(t, repo) // Load the job repository here so that we can validate run times. 
- jobRepo, err := job.NewRepository(ctx, dbRW, dbRW, dbKmsCache) + jobRepo, err := job.NewRepository(dbRW, dbRW, dbKmsCache) require.NoError(t, err) require.NotNil(t, repo) diff --git a/internal/host/plugin/repository_host_set.go b/internal/host/plugin/repository_host_set.go index 7a5a5787740..deec2cd9d4e 100644 --- a/internal/host/plugin/repository_host_set.go +++ b/internal/host/plugin/repository_host_set.go @@ -132,9 +132,6 @@ func (r *Repository) CreateSet(ctx context.Context, projectId string, s *HostSet if err := normalizeSetAttributes(ctx, plgClient, plgHs); err != nil { return nil, nil, errors.Wrap(ctx, err, op) } - if s.Attributes, err = proto.Marshal(plgHs.GetAttributes()); err != nil { - return nil, nil, errors.Wrap(ctx, err, op) - } } var preferredEndpoints []any diff --git a/internal/host/plugin/repository_host_set_test.go b/internal/host/plugin/repository_host_set_test.go index f30dac11cb5..dddd04f3611 100644 --- a/internal/host/plugin/repository_host_set_test.go +++ b/internal/host/plugin/repository_host_set_test.go @@ -37,7 +37,6 @@ import ( ) func TestRepository_CreateSet(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -243,7 +242,7 @@ func TestRepository_CreateSet(t *testing.T) { Description: ("test-description-repo"), Attributes: func() []byte { st, err := structpb.NewStruct(map[string]any{ - "k1": nil, + "k1": "foo", "removed": nil, normalizeToSliceKey: "normalizeme", }) @@ -259,14 +258,7 @@ func TestRepository_CreateSet(t *testing.T) { CatalogId: catalog.PublicId, Description: ("test-description-repo"), Attributes: func() []byte { - b, err := proto.Marshal(&structpb.Struct{Fields: map[string]*structpb.Value{ - normalizeToSliceKey: structpb.NewListValue( - &structpb.ListValue{ - Values: []*structpb.Value{ - structpb.NewStringValue("normalizeme"), - }, - }), - }}) + b, err := proto.Marshal(&structpb.Struct{Fields: map[string]*structpb.Value{"k1": structpb.NewStringValue("foo")}}) require.NoError(t, err) return b }(), @@ -317,10 +309,10 @@ func TestRepository_CreateSet(t *testing.T) { return plgpb.UnimplementedHostPluginServiceServer{}.OnCreateSet(ctx, req) }}), } - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) require.NoError(err) require.NotNil(repo) - got, plgInfo, err := repo.CreateSet(ctx, prj.GetPublicId(), tt.in, tt.opts...) + got, plgInfo, err := repo.CreateSet(context.Background(), prj.GetPublicId(), tt.in, tt.opts...) 
assert.Equal(tt.wantPluginCalled, pluginCalled) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) @@ -335,7 +327,6 @@ func TestRepository_CreateSet(t *testing.T) { assert.Equal(tt.want.Name, got.GetName()) assert.Equal(tt.want.Description, got.GetDescription()) assert.Equal(got.GetCreateTime(), got.GetUpdateTime()) - assert.Equal(string(tt.want.GetAttributes()), string(got.GetAttributes())) if origPluginAttrs != nil { if normalizeVal := origPluginAttrs.Fields[normalizeToSliceKey]; normalizeVal != nil { @@ -371,7 +362,7 @@ func TestRepository_CreateSet(t *testing.T) { return &plgpb.OnCreateSetResponse{}, nil }}), } - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) require.NoError(err) require.NotNil(repo) @@ -414,7 +405,7 @@ func TestRepository_CreateSet(t *testing.T) { return &plgpb.OnCreateSetResponse{}, nil }}), } - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) require.NoError(err) require.NotNil(repo) @@ -1245,7 +1236,7 @@ func TestRepository_UpdateSet(t *testing.T) { if tt.withEmptyPluginMap { pluginMap = make(map[string]plgpb.HostPluginServiceClient) } - repo, err := NewRepository(ctx, dbRW, dbRW, dbKmsCache, sched, pluginMap) + repo, err := NewRepository(dbRW, dbRW, dbKmsCache, sched, pluginMap) require.NoError(err) require.NotNil(repo) @@ -1330,7 +1321,7 @@ func TestRepository_UpdateSet(t *testing.T) { origSet, _ := setupBareHostSet(t, ctx) pluginMap := testPluginMap - repo, err := NewRepository(ctx, dbRW, dbRW, dbKmsCache, sched, pluginMap) + repo, err := NewRepository(dbRW, dbRW, dbKmsCache, sched, pluginMap) require.NoError(t, err) require.NotNil(t, repo) @@ -1402,7 +1393,7 @@ func TestRepository_LookupSet(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(err) require.NotNil(repo) got, _, err := repo.LookupSet(ctx, tt.in) @@ -1519,7 +1510,7 @@ func TestRepository_Endpoints(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(err) require.NotNil(repo) got, err := repo.Endpoints(ctx, tt.setIds) @@ -1557,7 +1548,6 @@ func TestRepository_Endpoints(t *testing.T) { } func TestRepository_ListSets(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1607,10 +1597,10 @@ func TestRepository_ListSets(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(err) require.NotNil(repo) - got, gotPlg, err := repo.ListSets(ctx, tt.in, tt.opts...) + got, gotPlg, err := repo.ListSets(context.Background(), tt.in, tt.opts...) 
if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) @@ -1630,7 +1620,6 @@ func TestRepository_ListSets(t *testing.T) { } func TestRepository_ListSets_Limits(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1697,10 +1686,10 @@ func TestRepository_ListSets_Limits(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, sched, plgm, tt.repoOpts...) assert.NoError(err) require.NotNil(repo) - got, gotPlg, err := repo.ListSets(ctx, hostSets[0].CatalogId, tt.listOpts...) + got, gotPlg, err := repo.ListSets(context.Background(), hostSets[0].CatalogId, tt.listOpts...) require.NoError(err) assert.Len(got, tt.wantLen) assert.Empty(cmp.Diff(plg, gotPlg, protocmp.Transform())) @@ -1780,7 +1769,7 @@ func TestRepository_DeleteSet(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) assert.NoError(err) require.NotNil(repo) got, err := repo.DeleteSet(ctx, prj.PublicId, tt.in) diff --git a/internal/host/plugin/repository_host_test.go b/internal/host/plugin/repository_host_test.go index 268e173db42..b1e0ecaa1c3 100644 --- a/internal/host/plugin/repository_host_test.go +++ b/internal/host/plugin/repository_host_test.go @@ -224,7 +224,7 @@ func TestJob_UpsertHosts(t *testing.T) { ), ) - repo, err := NewRepository(ctx, rw, rw, kms, sched, plgm) + repo, err := NewRepository(rw, rw, kms, sched, plgm) require.NoError(err) require.NotNil(repo) diff --git a/internal/host/plugin/repository_test.go b/internal/host/plugin/repository_test.go index 61e2107584d..50b9453bf68 100644 --- a/internal/host/plugin/repository_test.go +++ b/internal/host/plugin/repository_test.go @@ -4,7 +4,6 @@ package plugin import ( - "context" "testing" "github.com/hashicorp/boundary/internal/scheduler" @@ -156,7 +155,7 @@ func TestRepository_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewRepository(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.args.scheduler, tt.args.plugins, tt.args.opts...) + got, err := NewRepository(tt.args.r, tt.args.w, tt.args.kms, tt.args.scheduler, tt.args.plugins, tt.args.opts...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) diff --git a/internal/host/plugin/testing.go b/internal/host/plugin/testing.go index a637d5490a6..436d8ea709d 100644 --- a/internal/host/plugin/testing.go +++ b/internal/host/plugin/testing.go @@ -70,7 +70,7 @@ func TestSet(t testing.TB, conn *db.DB, kmsCache *kms.Kms, sched *scheduler.Sche ctx := context.Background() rw := db.New(conn) - repo, err := NewRepository(ctx, rw, rw, kmsCache, sched, plgm) + repo, err := NewRepository(rw, rw, kmsCache, sched, plgm) require.NoError(err) set, err := NewHostSet(ctx, hc.PublicId, opt...) 
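Before moving on to the static package, a brief sketch (not part of the diff) of the plugin host repository constructor as it reads after this change: no context argument, with the plugin map built the same way as the tests above build it. The helper name is illustrative, the code is written as if inside internal/host/plugin, and imports are assumed to match the aliases used in those test files (db, kms, scheduler, plgpb, loopback).

    // Illustrative helper, assuming the same imports as the plugin tests above.
    func newPluginHostRepo(r db.Reader, w db.Writer, kmsCache *kms.Kms, sched *scheduler.Scheduler, pluginId string) (*Repository, error) {
        plgm := map[string]plgpb.HostPluginServiceClient{
            // Loopback client, as in the setup of host_set_member_test.go.
            pluginId: loopback.NewWrappingPluginHostClient(&plgpb.UnimplementedHostPluginServiceServer{}),
        }
        // Post-change signature: no context; nil arguments surface as errors.InvalidParameter.
        return NewRepository(r, w, kmsCache, sched, plgm)
    }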
diff --git a/internal/host/static/example_test.go b/internal/host/static/example_test.go index 396d1ae63ca..ec9e05d957c 100644 --- a/internal/host/static/example_test.go +++ b/internal/host/static/example_test.go @@ -4,7 +4,6 @@ package static_test import ( - "context" "fmt" "github.com/hashicorp/boundary/internal/host/static" @@ -12,7 +11,7 @@ import ( func ExampleNewHostCatalog() { projectPublicId := "p_1234" - catalog, _ := static.NewHostCatalog(context.Background(), projectPublicId, static.WithName("my catalog")) + catalog, _ := static.NewHostCatalog(projectPublicId, static.WithName("my catalog")) fmt.Println(catalog.Name) // Output: // my catalog @@ -20,7 +19,7 @@ func ExampleNewHostCatalog() { func ExampleNewHost() { catalogPublicId := "hcst_1234" - host, _ := static.NewHost(context.Background(), catalogPublicId, static.WithAddress("127.0.0.1")) + host, _ := static.NewHost(catalogPublicId, static.WithAddress("127.0.0.1")) fmt.Println(host.Address) // Output: // 127.0.0.1 @@ -28,7 +27,7 @@ func ExampleNewHost() { func ExampleNewHostSet() { catalogPublicId := "hcst_1234" - set, _ := static.NewHostSet(context.Background(), catalogPublicId, static.WithName("my host set")) + set, _ := static.NewHostSet(catalogPublicId, static.WithName("my host set")) fmt.Println(set.Name) // Output: // my host set @@ -37,7 +36,7 @@ func ExampleNewHostSet() { func ExampleNewHostSetMember() { setPublicId := "hsst_11111" hostPublicId := "hst_22222" - member, _ := static.NewHostSetMember(context.Background(), setPublicId, hostPublicId) + member, _ := static.NewHostSetMember(setPublicId, hostPublicId) fmt.Println(member.SetId) fmt.Println(member.HostId) } diff --git a/internal/host/static/host.go b/internal/host/static/host.go index 49fff96d39a..fa9db140e1e 100644 --- a/internal/host/static/host.go +++ b/internal/host/static/host.go @@ -4,7 +4,6 @@ package static import ( - "context" "sort" "strings" @@ -30,9 +29,9 @@ type Host struct { // NewHost creates a new in memory Host for address assigned to catalogId. // Name and description are the only valid options. All other options are // ignored. -func NewHost(ctx context.Context, catalogId string, opt ...Option) (*Host, error) { +func NewHost(catalogId string, opt ...Option) (*Host, error) { if catalogId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, "static.NewHost", "no catalog id") + return nil, errors.NewDeprecated(errors.InvalidParameter, "static.NewHost", "no catalog id") } opts := getOpts(opt...) diff --git a/internal/host/static/host_catalog.go b/internal/host/static/host_catalog.go index bd75089c4e7..d5f6f227fd0 100644 --- a/internal/host/static/host_catalog.go +++ b/internal/host/static/host_catalog.go @@ -4,8 +4,6 @@ package static import ( - "context" - "github.com/hashicorp/boundary/internal/errors" "github.com/hashicorp/boundary/internal/host/static/store" "github.com/hashicorp/boundary/internal/oplog" @@ -22,9 +20,9 @@ type HostCatalog struct { // NewHostCatalog creates a new in memory HostCatalog assigned to projectId. // Name and description are the only valid options. All other options are // ignored. -func NewHostCatalog(ctx context.Context, projectId string, opt ...Option) (*HostCatalog, error) { +func NewHostCatalog(projectId string, opt ...Option) (*HostCatalog, error) { if projectId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, "static.NewHostCatalog", "no project id") + return nil, errors.NewDeprecated(errors.InvalidParameter, "static.NewHostCatalog", "no project id") } opts := getOpts(opt...) 
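A short sketch (not part of the diff) of how the post-change static constructors behave from a caller's point of view: no context parameter, and a missing ID surfaces as errors.InvalidParameter via errors.NewDeprecated, which callers can detect with errors.Match exactly as the tests in this diff do. The function name is illustrative; the literals mirror example_test.go above.

    // Illustrative only; identifiers mirror example_test.go and the test hunks above.
    func staticConstructorSketch() error {
        catalog, err := static.NewHostCatalog("p_1234", static.WithName("my catalog"))
        if err != nil {
            return err // not expected: the project id is non-empty
        }
        _ = catalog
        // An empty catalog id is rejected without needing a context.
        if _, err := static.NewHost("", static.WithAddress("127.0.0.1")); !errors.Match(errors.T(errors.InvalidParameter), err) {
            return err
        }
        return nil
    }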
diff --git a/internal/host/static/host_catalog_test.go b/internal/host/static/host_catalog_test.go index f219d31e0fe..f90f255358b 100644 --- a/internal/host/static/host_catalog_test.go +++ b/internal/host/static/host_catalog_test.go @@ -16,7 +16,6 @@ import ( ) func TestHostCatalog_New(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -87,7 +86,7 @@ func TestHostCatalog_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) - got, err := NewHostCatalog(ctx, tt.args.projectId, tt.args.opts...) + got, err := NewHostCatalog(tt.args.projectId, tt.args.opts...) if tt.wantErr { assert.Error(err) assert.Nil(got) @@ -97,14 +96,14 @@ func TestHostCatalog_New(t *testing.T) { assert.Emptyf(got.PublicId, "PublicId set") assert.Equal(tt.want, got) - id, err := newHostCatalogId(ctx) + id, err := newHostCatalogId() assert.NoError(err) tt.want.PublicId = id got.PublicId = id w := db.New(conn) - err2 := w.Create(ctx, got) + err2 := w.Create(context.Background(), got) assert.NoError(err2) } } diff --git a/internal/host/static/host_set.go b/internal/host/static/host_set.go index 215726069cd..d933f542f86 100644 --- a/internal/host/static/host_set.go +++ b/internal/host/static/host_set.go @@ -4,8 +4,6 @@ package static import ( - "context" - "github.com/hashicorp/boundary/internal/errors" "github.com/hashicorp/boundary/internal/host/static/store" "github.com/hashicorp/boundary/internal/oplog" @@ -21,9 +19,9 @@ type HostSet struct { // NewHostSet creates a new in memory HostSet assigned to catalogId. // Name and description are the only valid options. All other options are // ignored. -func NewHostSet(ctx context.Context, catalogId string, opt ...Option) (*HostSet, error) { +func NewHostSet(catalogId string, opt ...Option) (*HostSet, error) { if catalogId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, "static.NewHostSet", "no catalog id") + return nil, errors.NewDeprecated(errors.InvalidParameter, "static.NewHostSet", "no catalog id") } opts := getOpts(opt...) diff --git a/internal/host/static/host_set_member.go b/internal/host/static/host_set_member.go index e02b35a74a1..f16ad296eeb 100644 --- a/internal/host/static/host_set_member.go +++ b/internal/host/static/host_set_member.go @@ -4,8 +4,6 @@ package static import ( - "context" - "github.com/hashicorp/boundary/internal/errors" "github.com/hashicorp/boundary/internal/host/static/store" ) @@ -18,13 +16,13 @@ type HostSetMember struct { // NewHostSetMember creates a new in memory HostSetMember representing the // membership of hostId in hostSetId. 
-func NewHostSetMember(ctx context.Context, hostSetId, hostId string, opt ...Option) (*HostSetMember, error) { +func NewHostSetMember(hostSetId, hostId string, opt ...Option) (*HostSetMember, error) { const op = "static.NewHostSetMember" if hostSetId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no host set id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no host set id") } if hostId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no host id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no host id") } member := &HostSetMember{ HostSetMember: &store.HostSetMember{ diff --git a/internal/host/static/host_set_member_test.go b/internal/host/static/host_set_member_test.go index dd043495697..2c0365b7c26 100644 --- a/internal/host/static/host_set_member_test.go +++ b/internal/host/static/host_set_member_test.go @@ -15,7 +15,6 @@ import ( ) func TestHostSetMember_Insert(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -51,11 +50,11 @@ func TestHostSetMember_Insert(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewHostSetMember(ctx, tt.set.PublicId, tt.host.PublicId) + got, err := NewHostSetMember(tt.set.PublicId, tt.host.PublicId) require.NoError(err) require.NotNil(got) w := db.New(conn) - err2 := w.Create(ctx, got) + err2 := w.Create(context.Background(), got) if tt.wantErr { assert.Error(err2) return diff --git a/internal/host/static/host_set_test.go b/internal/host/static/host_set_test.go index 43629381415..b6003dd6af7 100644 --- a/internal/host/static/host_set_test.go +++ b/internal/host/static/host_set_test.go @@ -15,7 +15,6 @@ import ( ) func TestHostSet_New(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres", db.WithTestLogLevel(t, db.SilentTestLogLevel)) wrapper := db.TestWrapper(t) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -86,7 +85,7 @@ func TestHostSet_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) - got, err := NewHostSet(ctx, tt.args.catalogId, tt.args.opts...) + got, err := NewHostSet(tt.args.catalogId, tt.args.opts...) if tt.wantErr { assert.Error(err) assert.Nil(got) @@ -96,14 +95,14 @@ func TestHostSet_New(t *testing.T) { assert.Emptyf(got.PublicId, "PublicId set") assert.Equal(tt.want, got) - id, err := newHostSetId(ctx) + id, err := newHostSetId() assert.NoError(err) tt.want.PublicId = id got.PublicId = id w := db.New(conn) - err2 := w.Create(ctx, got) + err2 := w.Create(context.Background(), got) assert.NoError(err2) } } diff --git a/internal/host/static/host_test.go b/internal/host/static/host_test.go index 736965e0e9e..37be1406503 100644 --- a/internal/host/static/host_test.go +++ b/internal/host/static/host_test.go @@ -15,7 +15,6 @@ import ( ) func TestHost_New(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres", db.WithTestLogLevel(t, db.SilentTestLogLevel)) wrapper := db.TestWrapper(t) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -132,7 +131,7 @@ func TestHost_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) - got, err := NewHost(ctx, tt.args.catalogId, tt.args.opts...) + got, err := NewHost(tt.args.catalogId, tt.args.opts...) 
if tt.wantCreateErr { assert.Error(err) assert.Nil(got) @@ -142,14 +141,14 @@ func TestHost_New(t *testing.T) { assert.Emptyf(got.PublicId, "PublicId set") assert.Equal(tt.want, got) - id, err := newHostId(ctx) + id, err := newHostId() assert.NoError(err) tt.want.PublicId = id got.PublicId = id w := db.New(conn) - err2 := w.Create(ctx, got) + err2 := w.Create(context.Background(), got) if tt.wantWriteErr { assert.Error(err2) } diff --git a/internal/host/static/immutable_fields_test.go b/internal/host/static/immutable_fields_test.go index b10462b516c..42327f9f7ae 100644 --- a/internal/host/static/immutable_fields_test.go +++ b/internal/host/static/immutable_fields_test.go @@ -230,7 +230,6 @@ func (c *HostSet) testCloneHostSet() *HostSet { func TestStaticHostSetMember_ImmutableFields(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") w := db.New(conn) wrapper := db.TestWrapper(t) @@ -240,9 +239,9 @@ func TestStaticHostSetMember_ImmutableFields(t *testing.T) { sets := TestSets(t, conn, cat.GetPublicId(), 1) hosts := TestHosts(t, conn, cat.GetPublicId(), 1) - new, err := NewHostSetMember(ctx, sets[0].PublicId, hosts[0].PublicId) + new, err := NewHostSetMember(sets[0].PublicId, hosts[0].PublicId) require.NoError(t, err) - err = w.Create(ctx, new) + err = w.Create(context.Background(), new) assert.NoError(t, err) tests := []struct { @@ -277,12 +276,12 @@ func TestStaticHostSetMember_ImmutableFields(t *testing.T) { err = w.LookupWhere(context.Background(), orig, "host_id = ? and set_id = ?", []any{orig.HostId, orig.SetId}) require.NoError(err) - rowsUpdated, err := w.Update(ctx, tt.update, tt.fieldMask, nil, db.WithSkipVetForWrite(true)) + rowsUpdated, err := w.Update(context.Background(), tt.update, tt.fieldMask, nil, db.WithSkipVetForWrite(true)) require.Error(err) assert.Equal(0, rowsUpdated) after := new.testCloneHostSetMember() - err = w.LookupWhere(ctx, after, "host_id = ? and set_id = ?", []any{after.HostId, after.SetId}) + err = w.LookupWhere(context.Background(), after, "host_id = ? 
and set_id = ?", []any{after.HostId, after.SetId}) require.NoError(err) assert.True(proto.Equal(orig, after)) diff --git a/internal/host/static/public_ids.go b/internal/host/static/public_ids.go index 4b7685c4a02..d49a05e0b78 100644 --- a/internal/host/static/public_ids.go +++ b/internal/host/static/public_ids.go @@ -4,8 +4,6 @@ package static import ( - "context" - "github.com/hashicorp/boundary/globals" "github.com/hashicorp/boundary/internal/db" "github.com/hashicorp/boundary/internal/errors" @@ -24,26 +22,26 @@ const ( Subtype = subtypes.Subtype("static") ) -func newHostCatalogId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.StaticHostCatalogPrefix) +func newHostCatalogId() (string, error) { + id, err := db.NewPublicId(globals.StaticHostCatalogPrefix) if err != nil { - return "", errors.Wrap(ctx, err, "static.newHostCatalogId") + return "", errors.WrapDeprecated(err, "static.newHostCatalogId") } return id, nil } -func newHostId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.StaticHostPrefix) +func newHostId() (string, error) { + id, err := db.NewPublicId(globals.StaticHostPrefix) if err != nil { - return "", errors.Wrap(ctx, err, "static.newHostId") + return "", errors.WrapDeprecated(err, "static.newHostId") } return id, nil } -func newHostSetId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.StaticHostSetPrefix) +func newHostSetId() (string, error) { + id, err := db.NewPublicId(globals.StaticHostSetPrefix) if err != nil { - return "", errors.Wrap(ctx, err, "static.newHostSetId") + return "", errors.WrapDeprecated(err, "static.newHostSetId") } return id, nil } diff --git a/internal/host/static/repository.go b/internal/host/static/repository.go index 73660aa6fa7..18b8a2e4a3b 100644 --- a/internal/host/static/repository.go +++ b/internal/host/static/repository.go @@ -4,8 +4,6 @@ package static import ( - "context" - "github.com/hashicorp/boundary/internal/db" "github.com/hashicorp/boundary/internal/errors" "github.com/hashicorp/boundary/internal/kms" @@ -26,15 +24,15 @@ type Repository struct { // only be used for one transaction and it is not safe for concurrent go // routines to access it. WithLimit option is used as a repo wide default // limit applied to all ListX methods. -func NewRepository(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { +func NewRepository(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { const op = "static.NewRepository" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "db.Reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "db.Reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "db.Writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "db.Writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "kms") } opts := getOpts(opt...) 
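To round out the static package changes, a sketch (not part of the diff) of ID generation and repository construction with the post-change signatures shown in public_ids.go and repository.go above: db.NewPublicId takes only the prefix, failures are wrapped with errors.WrapDeprecated, and NewRepository no longer takes a context. The function name and op string are illustrative, and the code is written as if inside the static package.

    // Illustrative only; signatures match public_ids.go and repository.go above.
    func staticSetupSketch(r db.Reader, w db.Writer, kmsCache *kms.Kms) (string, *Repository, error) {
        // Catalog IDs carry the "hcst" prefix via globals.StaticHostCatalogPrefix.
        id, err := db.NewPublicId(globals.StaticHostCatalogPrefix)
        if err != nil {
            return "", nil, errors.WrapDeprecated(err, "staticSetupSketch")
        }
        repo, err := NewRepository(r, w, kmsCache)
        if err != nil {
            return "", nil, errors.WrapDeprecated(err, "staticSetupSketch")
        }
        return id, repo, nil
    }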
diff --git a/internal/host/static/repository_host.go b/internal/host/static/repository_host.go index 87a3d528f3a..e8a39f16d9a 100644 --- a/internal/host/static/repository_host.go +++ b/internal/host/static/repository_host.go @@ -60,7 +60,7 @@ func (r *Repository) CreateHost(ctx context.Context, projectId string, h *Host, } h.PublicId = opts.withPublicId } else { - id, err := newHostId(ctx) + id, err := newHostId() if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/host/static/repository_host_catalog.go b/internal/host/static/repository_host_catalog.go index 12b8cb2c18c..b81704807e1 100644 --- a/internal/host/static/repository_host_catalog.go +++ b/internal/host/static/repository_host_catalog.go @@ -52,7 +52,7 @@ func (r *Repository) CreateCatalog(ctx context.Context, c *HostCatalog, opt ...O } c.PublicId = opts.withPublicId } else { - id, err := newHostCatalogId(ctx) + id, err := newHostCatalogId() if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/host/static/repository_host_catalog_test.go b/internal/host/static/repository_host_catalog_test.go index ddcf40f1c43..a0c92a94a66 100644 --- a/internal/host/static/repository_host_catalog_test.go +++ b/internal/host/static/repository_host_catalog_test.go @@ -19,7 +19,6 @@ import ( ) func TestRepository_CreateCatalog(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -82,7 +81,7 @@ func TestRepository_CreateCatalog(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) assert.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -90,7 +89,7 @@ func TestRepository_CreateCatalog(t *testing.T) { tt.in.ProjectId = prj.GetPublicId() assert.Empty(tt.in.PublicId) } - got, err := repo.CreateCatalog(ctx, tt.in, tt.opts...) + got, err := repo.CreateCatalog(context.Background(), tt.in, tt.opts...) 
if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) @@ -110,7 +109,7 @@ func TestRepository_CreateCatalog(t *testing.T) { t.Run("invalid-duplicate-names", func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) assert.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -121,7 +120,7 @@ func TestRepository_CreateCatalog(t *testing.T) { }, } - got, err := repo.CreateCatalog(ctx, in) + got, err := repo.CreateCatalog(context.Background(), in) assert.NoError(err) assert.NotNil(got) assertPublicId(t, "hcst", got.PublicId) @@ -130,7 +129,7 @@ func TestRepository_CreateCatalog(t *testing.T) { assert.Equal(in.Description, got.Description) assert.Equal(got.CreateTime, got.UpdateTime) - got2, err := repo.CreateCatalog(ctx, in) + got2, err := repo.CreateCatalog(context.Background(), in) assert.Truef(errors.Match(errors.T(errors.NotUnique), err), "want err code: %v got err: %v", errors.NotUnique, err) assert.Nil(got2) }) @@ -138,7 +137,7 @@ func TestRepository_CreateCatalog(t *testing.T) { t.Run("valid-duplicate-names-diff-projects", func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) assert.NotNil(repo) org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -151,7 +150,7 @@ func TestRepository_CreateCatalog(t *testing.T) { in2 := in.clone() in.ProjectId = prj.GetPublicId() - got, err := repo.CreateCatalog(ctx, in) + got, err := repo.CreateCatalog(context.Background(), in) assert.NoError(err) assert.NotNil(got) assertPublicId(t, "hcst", got.PublicId) @@ -161,7 +160,7 @@ func TestRepository_CreateCatalog(t *testing.T) { assert.Equal(got.CreateTime, got.UpdateTime) in2.ProjectId = prj2.GetPublicId() - got2, err := repo.CreateCatalog(ctx, in2) + got2, err := repo.CreateCatalog(context.Background(), in2) assert.NoError(err) assert.NotNil(got2) assertPublicId(t, "hcst", got2.PublicId) @@ -441,19 +440,19 @@ func TestRepository_UpdateCatalog(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) assert.NotNil(repo) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) tt.orig.ProjectId = prj.GetPublicId() - orig, err := repo.CreateCatalog(ctx, tt.orig) + orig, err := repo.CreateCatalog(context.Background(), tt.orig) require.NoError(t, err) require.NotNil(t, orig) if tt.chgFn != nil { orig = tt.chgFn(orig) } - got, gotCount, err := repo.UpdateCatalog(ctx, orig, 1, tt.masks) + got, gotCount, err := repo.UpdateCatalog(context.Background(), orig, 1, tt.masks) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Equal(tt.wantCount, gotCount, "row count") @@ -486,7 +485,7 @@ func TestRepository_UpdateCatalog(t *testing.T) { t.Run("invalid-duplicate-names", func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) assert.NotNil(repo) @@ -512,7 +511,7 @@ func TestRepository_UpdateCatalog(t *testing.T) { t.Run("valid-duplicate-names-diff-projects", func(t 
*testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) assert.NotNil(repo) org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) @@ -551,7 +550,7 @@ func TestRepository_UpdateCatalog(t *testing.T) { t.Run("change-project-id", func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) assert.NotNil(repo) @@ -575,13 +574,12 @@ func TestRepository_UpdateCatalog(t *testing.T) { } func TestRepository_LookupCatalog(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) cat := testCatalog(t, conn, prj.PublicId) - badId, err := newHostCatalogId(ctx) + badId, err := newHostCatalogId() assert.NoError(t, err) assert.NotNil(t, badId) @@ -614,11 +612,11 @@ func TestRepository_LookupCatalog(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) assert.NotNil(repo) - got, err := repo.LookupCatalog(ctx, tt.id) + got, err := repo.LookupCatalog(context.Background(), tt.id) if tt.wantErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantErr), err), "want err: %q got: %q", tt.wantErr, err) return @@ -637,14 +635,13 @@ func TestRepository_LookupCatalog(t *testing.T) { } func TestRepository_DeleteCatalog(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) _, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) cat := testCatalog(t, conn, prj.PublicId) - badId, err := newHostCatalogId(ctx) + badId, err := newHostCatalogId() assert.NoError(t, err) assert.NotNil(t, badId) @@ -677,11 +674,11 @@ func TestRepository_DeleteCatalog(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) assert.NotNil(repo) - got, err := repo.DeleteCatalog(ctx, tt.id) + got, err := repo.DeleteCatalog(context.Background(), tt.id) if tt.wantErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantErr), err), "want err: %q got: %q", tt.wantErr, err) return @@ -694,12 +691,11 @@ func TestRepository_DeleteCatalog(t *testing.T) { func TestRepository_ListCatalogs_Multiple_Scopes(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) rw := db.New(conn) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(t, err) assert.NotNil(t, repo) @@ -715,7 +711,7 @@ func TestRepository_ListCatalogs_Multiple_Scopes(t *testing.T) { } } - got, err := repo.ListCatalogs(ctx, projs) + got, err := repo.ListCatalogs(context.Background(), projs) require.NoError(t, err) assert.Equal(t, total, len(got)) } diff --git a/internal/host/static/repository_host_set.go b/internal/host/static/repository_host_set.go index 401df90c7f1..5b78b36a398 100644 --- a/internal/host/static/repository_host_set.go +++ b/internal/host/static/repository_host_set.go @@ -55,7 +55,7 @@ func (r *Repository) CreateSet(ctx 
context.Context, projectId string, s *HostSet } s.PublicId = opts.withPublicId } else { - id, err := newHostSetId(ctx) + id, err := newHostSetId() if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/host/static/repository_host_set_member.go b/internal/host/static/repository_host_set_member.go index 2ab502e648b..637ae16b5e5 100644 --- a/internal/host/static/repository_host_set_member.go +++ b/internal/host/static/repository_host_set_member.go @@ -78,7 +78,7 @@ func (r *Repository) newMembers(ctx context.Context, setId string, hostIds []str var members []any for _, id := range hostIds { var m *HostSetMember - m, err := NewHostSetMember(ctx, setId, id) + m, err := NewHostSetMember(setId, id) if err != nil { return nil, errors.Wrap(ctx, err, "static.newMembers") } @@ -263,7 +263,7 @@ func (r *Repository) SetSetMembers(ctx context.Context, projectId string, setId } var deletions, additions []any for _, c := range changes { - m, err := NewHostSetMember(ctx, setId, c.HostId) + m, err := NewHostSetMember(setId, c.HostId) if err != nil { return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op) } diff --git a/internal/host/static/repository_host_set_member_test.go b/internal/host/static/repository_host_set_member_test.go index fe233d473b7..9f8d3648515 100644 --- a/internal/host/static/repository_host_set_member_test.go +++ b/internal/host/static/repository_host_set_member_test.go @@ -21,7 +21,6 @@ import ( ) func TestRepository_AddSetMembers_Parameters(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -116,10 +115,10 @@ func TestRepository_AddSetMembers_Parameters(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.AddSetMembers(ctx, tt.args.projectId, tt.args.setId, tt.args.version, tt.args.hostIds, tt.args.opt...) + got, err := repo.AddSetMembers(context.Background(), tt.args.projectId, tt.args.setId, tt.args.version, tt.args.hostIds, tt.args.opt...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) @@ -145,7 +144,6 @@ func TestRepository_AddSetMembers_Parameters(t *testing.T) { } func TestRepository_AddSetMembers_Combinations(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -158,7 +156,7 @@ func TestRepository_AddSetMembers_Combinations(t *testing.T) { c := TestCatalogs(t, conn, prj.PublicId, 1)[0] set := TestSets(t, conn, c.PublicId, 1)[0] - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -188,7 +186,7 @@ func TestRepository_AddSetMembers_Combinations(t *testing.T) { for _, h := range Hosts { hostIds2 = append(hostIds2, h.PublicId) } - got2, err2 := repo.AddSetMembers(ctx, prj.PublicId, set.PublicId, set.Version, hostIds2) + got2, err2 := repo.AddSetMembers(context.Background(), prj.PublicId, set.PublicId, set.Version, hostIds2) require.NoError(err2) require.NotNil(got2) @@ -205,13 +203,12 @@ func TestRepository_AddSetMembers_Combinations(t *testing.T) { hostIds3 = append(hostIds2, h.PublicId) } hostIds3 = append(hostIds3, hostIds2...) 
- got3, err3 := repo.AddSetMembers(ctx, prj.PublicId, set.PublicId, set.Version, hostIds3) + got3, err3 := repo.AddSetMembers(context.Background(), prj.PublicId, set.PublicId, set.Version, hostIds3) require.Error(err3) require.Nil(got3) } func TestRepository_DeleteSetMembers_Parameters(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -310,10 +307,10 @@ func TestRepository_DeleteSetMembers_Parameters(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.DeleteSetMembers(ctx, tt.args.projectId, tt.args.setId, tt.args.version, tt.args.hostIds, tt.args.opt...) + got, err := repo.DeleteSetMembers(context.Background(), tt.args.projectId, tt.args.setId, tt.args.version, tt.args.hostIds, tt.args.opt...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Zero(got) @@ -334,7 +331,6 @@ func TestRepository_DeleteSetMembers_Parameters(t *testing.T) { } func TestRepository_DeleteSetMembers_Combinations(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -347,7 +343,7 @@ func TestRepository_DeleteSetMembers_Combinations(t *testing.T) { c := TestCatalogs(t, conn, prj.PublicId, 1)[0] set := TestSets(t, conn, c.PublicId, 1)[0] - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -364,13 +360,13 @@ func TestRepository_DeleteSetMembers_Combinations(t *testing.T) { hostsB, idsB := hosts[split:], hostIds[split:] // first call - delete first half of hosts - should succeed - got, err := repo.DeleteSetMembers(ctx, prj.PublicId, set.PublicId, set.Version, idsA) + got, err := repo.DeleteSetMembers(context.Background(), prj.PublicId, set.PublicId, set.Version, idsA) assert.NoError(err) require.Equal(len(idsA), got) assert.NoError(db.TestVerifyOplog(t, rw, set.PublicId, db.WithOperation(oplog.OpType_OP_TYPE_DELETE), db.WithCreateNotBefore(10*time.Second))) // verify hostsB are still members - members, err := getHosts(ctx, repo.reader, set.PublicId, unlimited) + members, err := getHosts(context.Background(), repo.reader, set.PublicId, unlimited) require.NoError(err) opts := []cmp.Option{ @@ -382,29 +378,28 @@ func TestRepository_DeleteSetMembers_Combinations(t *testing.T) { // second call - delete first half of hosts again - should fail set.Version = set.Version + 1 - got2, err2 := repo.DeleteSetMembers(ctx, prj.PublicId, set.PublicId, set.Version, idsA) + got2, err2 := repo.DeleteSetMembers(context.Background(), prj.PublicId, set.PublicId, set.Version, idsA) require.Error(err2) assert.Zero(got2) // third call - delete first half and second half - should fail - got3, err3 := repo.DeleteSetMembers(ctx, prj.PublicId, set.PublicId, set.Version, hostIds) + got3, err3 := repo.DeleteSetMembers(context.Background(), prj.PublicId, set.PublicId, set.Version, hostIds) require.Error(err3) assert.Zero(got3) // fourth call - delete second half of hosts - should succeed - got4, err4 := repo.DeleteSetMembers(ctx, prj.PublicId, set.PublicId, set.Version, idsB) + got4, err4 := repo.DeleteSetMembers(context.Background(), prj.PublicId, set.PublicId, set.Version, idsB) assert.NoError(err4) 
require.Equal(len(idsB), got4) assert.NoError(db.TestVerifyOplog(t, rw, set.PublicId, db.WithOperation(oplog.OpType_OP_TYPE_DELETE), db.WithCreateNotBefore(10*time.Second))) // verify no members remain - Members, err := getHosts(ctx, repo.reader, set.PublicId, unlimited) + Members, err := getHosts(context.Background(), repo.reader, set.PublicId, unlimited) require.NoError(err) require.Empty(Members) } func TestRepository_SetSetMembers_Parameters(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -501,10 +496,10 @@ func TestRepository_SetSetMembers_Parameters(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, gotCount, err := repo.SetSetMembers(ctx, tt.args.projectId, tt.args.setId, tt.args.version, tt.args.hostIds, tt.args.opt...) + got, gotCount, err := repo.SetSetMembers(context.Background(), tt.args.projectId, tt.args.setId, tt.args.version, tt.args.hostIds, tt.args.opt...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) @@ -533,7 +528,6 @@ func TestRepository_SetSetMembers_Parameters(t *testing.T) { } func TestRepository_SetSetMembers_Combinations(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -546,7 +540,7 @@ func TestRepository_SetSetMembers_Combinations(t *testing.T) { c := TestCatalogs(t, conn, prj.PublicId, 1)[0] set := TestSets(t, conn, c.PublicId, 1)[0] - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -569,7 +563,7 @@ func TestRepository_SetSetMembers_Combinations(t *testing.T) { } // first call - empty set, empty host Ids - no additions no deletions - got1, gotCount1, err1 := repo.SetSetMembers(ctx, prj.PublicId, set.PublicId, set.Version, nil) + got1, gotCount1, err1 := repo.SetSetMembers(context.Background(), prj.PublicId, set.PublicId, set.Version, nil) assert.NoError(err1) assert.Empty(got1) assert.Zero(gotCount1) @@ -585,7 +579,7 @@ func TestRepository_SetSetMembers_Combinations(t *testing.T) { // third call - mix of additions and deletions set.Version = set.Version + 1 - got3, gotCount3, err3 := repo.SetSetMembers(ctx, prj.PublicId, set.PublicId, set.Version, hostIdsB) + got3, gotCount3, err3 := repo.SetSetMembers(context.Background(), prj.PublicId, set.PublicId, set.Version, hostIdsB) assert.NoError(err3) assert.Equal(4, gotCount3) assert.NoError(db.TestVerifyOplog(t, rw, set.PublicId, db.WithOperation(oplog.OpType_OP_TYPE_UPDATE), db.WithCreateNotBefore(10*time.Second))) @@ -594,7 +588,7 @@ func TestRepository_SetSetMembers_Combinations(t *testing.T) { // fourth call - all deletions set.Version = set.Version + 1 - got4, gotCount4, err4 := repo.SetSetMembers(ctx, prj.PublicId, set.PublicId, set.Version, nil) + got4, gotCount4, err4 := repo.SetSetMembers(context.Background(), prj.PublicId, set.PublicId, set.Version, nil) assert.NoError(err4) assert.Equal(len(hostsB), gotCount4) assert.NoError(db.TestVerifyOplog(t, rw, set.PublicId, db.WithOperation(oplog.OpType_OP_TYPE_UPDATE), db.WithCreateNotBefore(10*time.Second))) @@ -602,7 +596,6 @@ func TestRepository_SetSetMembers_Combinations(t *testing.T) { } func TestRepository_changes(t 
*testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -632,11 +625,11 @@ func TestRepository_changes(t *testing.T) { } assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.changes(ctx, set.PublicId, hostIds) + got, err := repo.changes(context.Background(), set.PublicId, hostIds) assert.NoError(err) require.NotNil(got) opts := []cmp.Option{ @@ -662,11 +655,11 @@ func TestRepository_changes(t *testing.T) { } assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.changes(ctx, set.PublicId, nil) + got, err := repo.changes(context.Background(), set.PublicId, nil) assert.NoError(err) require.NotNil(got) opts := []cmp.Option{ @@ -709,11 +702,11 @@ func TestRepository_changes(t *testing.T) { } assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.changes(ctx, set.PublicId, targetHostIds) + got, err := repo.changes(context.Background(), set.PublicId, targetHostIds) assert.NoError(err) require.NotNil(got) opts := []cmp.Option{ diff --git a/internal/host/static/repository_host_set_test.go b/internal/host/static/repository_host_set_test.go index 71b028971f3..86064f86736 100644 --- a/internal/host/static/repository_host_set_test.go +++ b/internal/host/static/repository_host_set_test.go @@ -24,7 +24,6 @@ import ( ) func TestRepository_CreateSet(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -115,10 +114,10 @@ func TestRepository_CreateSet(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.CreateSet(ctx, prj.GetPublicId(), tt.in, tt.opts...) + got, err := repo.CreateSet(context.Background(), prj.GetPublicId(), tt.in, tt.opts...) 
if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) @@ -138,7 +137,7 @@ func TestRepository_CreateSet(t *testing.T) { t.Run("invalid-duplicate-names", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -152,7 +151,7 @@ func TestRepository_CreateSet(t *testing.T) { }, } - got, err := repo.CreateSet(ctx, prj.GetPublicId(), in) + got, err := repo.CreateSet(context.Background(), prj.GetPublicId(), in) require.NoError(err) require.NotNil(got) assertPublicId(t, globals.StaticHostSetPrefix, got.PublicId) @@ -161,14 +160,14 @@ func TestRepository_CreateSet(t *testing.T) { assert.Equal(in.Description, got.Description) assert.Equal(got.CreateTime, got.UpdateTime) - got2, err := repo.CreateSet(ctx, prj.GetPublicId(), in) + got2, err := repo.CreateSet(context.Background(), prj.GetPublicId(), in) assert.Truef(errors.Match(errors.T(errors.NotUnique), err), "want err code: %v got err: %v", errors.NotUnique, err) assert.Nil(got2) }) t.Run("valid-duplicate-names-diff-catalogs", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -185,7 +184,7 @@ func TestRepository_CreateSet(t *testing.T) { in2 := in.clone() in.CatalogId = catalogA.PublicId - got, err := repo.CreateSet(ctx, prj.GetPublicId(), in) + got, err := repo.CreateSet(context.Background(), prj.GetPublicId(), in) require.NoError(err) require.NotNil(got) assertPublicId(t, globals.StaticHostSetPrefix, got.PublicId) @@ -195,7 +194,7 @@ func TestRepository_CreateSet(t *testing.T) { assert.Equal(got.CreateTime, got.UpdateTime) in2.CatalogId = catalogB.PublicId - got2, err := repo.CreateSet(ctx, prj.GetPublicId(), in2) + got2, err := repo.CreateSet(context.Background(), prj.GetPublicId(), in2) require.NoError(err) require.NotNil(got2) assertPublicId(t, globals.StaticHostSetPrefix, got2.PublicId) @@ -468,7 +467,7 @@ func TestRepository_UpdateSet(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -477,7 +476,7 @@ func TestRepository_UpdateSet(t *testing.T) { hosts := TestHosts(t, conn, catalog.PublicId, 5) tt.orig.CatalogId = catalog.PublicId - orig, err := repo.CreateSet(ctx, prj.GetPublicId(), tt.orig) + orig, err := repo.CreateSet(context.Background(), prj.GetPublicId(), tt.orig) assert.NoError(err) require.NotNil(orig) TestSetMembers(t, conn, orig.PublicId, hosts) @@ -485,7 +484,7 @@ func TestRepository_UpdateSet(t *testing.T) { if tt.chgFn != nil { orig = tt.chgFn(orig) } - got, gotHosts, gotCount, err := repo.UpdateSet(ctx, prj.GetPublicId(), orig, 1, tt.masks) + got, gotHosts, gotCount, err := repo.UpdateSet(context.Background(), prj.GetPublicId(), orig, 1, tt.masks) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Equal(tt.wantCount, gotCount, "row count") @@ -526,7 +525,7 @@ func TestRepository_UpdateSet(t *testing.T) { t.Run("invalid-duplicate-names", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, 
rw, kms) assert.NoError(err) require.NotNil(repo) @@ -538,7 +537,7 @@ func TestRepository_UpdateSet(t *testing.T) { sA, sB := ss[0], ss[1] sA.Name = name - got1, gotHosts1, gotCount1, err := repo.UpdateSet(ctx, prj.GetPublicId(), sA, 1, []string{"name"}) + got1, gotHosts1, gotCount1, err := repo.UpdateSet(context.Background(), prj.GetPublicId(), sA, 1, []string{"name"}) assert.NoError(err) require.NotNil(got1) assert.Equal(name, got1.Name) @@ -547,7 +546,7 @@ func TestRepository_UpdateSet(t *testing.T) { assert.Empty(gotHosts1) sB.Name = name - got2, gotHosts, gotCount2, err := repo.UpdateSet(ctx, prj.GetPublicId(), sB, 1, []string{"name"}) + got2, gotHosts, gotCount2, err := repo.UpdateSet(context.Background(), prj.GetPublicId(), sB, 1, []string{"name"}) assert.Truef(errors.Match(errors.T(errors.NotUnique), err), "want err code: %v got err: %v", errors.NotUnique, err) assert.Nil(got2) assert.Equal(db.NoRowsAffected, gotCount2, "row count") @@ -559,7 +558,7 @@ func TestRepository_UpdateSet(t *testing.T) { t.Run("valid-duplicate-names-diff-Catalogs", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -576,7 +575,7 @@ func TestRepository_UpdateSet(t *testing.T) { in2 := in.clone() in.CatalogId = catalogA.PublicId - got, err := repo.CreateSet(ctx, prj.GetPublicId(), in) + got, err := repo.CreateSet(context.Background(), prj.GetPublicId(), in) assert.NoError(err) require.NotNil(got) assertPublicId(t, globals.StaticHostSetPrefix, got.PublicId) @@ -586,11 +585,11 @@ func TestRepository_UpdateSet(t *testing.T) { in2.CatalogId = catalogB.PublicId in2.Name = "first-name" - got2, err := repo.CreateSet(ctx, prj.GetPublicId(), in2) + got2, err := repo.CreateSet(context.Background(), prj.GetPublicId(), in2) assert.NoError(err) require.NotNil(got2) got2.Name = got.Name - got3, gotHosts3, gotCount3, err := repo.UpdateSet(ctx, prj.GetPublicId(), got2, 1, []string{"name"}) + got3, gotHosts3, gotCount3, err := repo.UpdateSet(context.Background(), prj.GetPublicId(), got2, 1, []string{"name"}) assert.NoError(err) require.NotNil(got3) assert.NotSame(got2, got3) @@ -603,7 +602,7 @@ func TestRepository_UpdateSet(t *testing.T) { t.Run("change-project-id", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -621,7 +620,7 @@ func TestRepository_UpdateSet(t *testing.T) { sA.CatalogId = sB.CatalogId assert.Equal(sA.CatalogId, sB.CatalogId) - got1, gotHosts1, gotCount1, err := repo.UpdateSet(ctx, prj.GetPublicId(), sA, 1, []string{"name"}) + got1, gotHosts1, gotCount1, err := repo.UpdateSet(context.Background(), prj.GetPublicId(), sA, 1, []string{"name"}) assert.NoError(err) require.NotNil(got1) @@ -633,7 +632,6 @@ func TestRepository_UpdateSet(t *testing.T) { } func TestRepository_UpdateSet_Limits(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -695,12 +693,12 @@ func TestRepository_UpdateSet_Limits(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, tt.repoOpts...) 
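Several tests above now build the repository as `NewRepository(rw, rw, kms, tt.repoOpts...)`: variadic functional options and no context argument. A rough sketch of that constructor shape, under the assumption of a `WithLimit` option and an arbitrary default of 20 (neither value is taken from the repository):

```go
package main

import (
	"errors"
	"fmt"
)

// options collects configuration from functional options.
type options struct {
	withLimit int
}

// Option configures a Repository.
type Option func(*options)

// WithLimit sets a default limit on results returned by repository operations.
func WithLimit(l int) Option {
	return func(o *options) { o.withLimit = l }
}

// Repository is a minimal stand-in with just enough state for the sketch.
type Repository struct {
	defaultLimit int
}

// NewRepository validates its dependencies and applies options; construction
// only checks arguments, so no context parameter is needed.
func NewRepository(reader, writer any, opt ...Option) (*Repository, error) {
	if reader == nil {
		return nil, errors.New("nil reader")
	}
	if writer == nil {
		return nil, errors.New("nil writer")
	}
	opts := options{withLimit: 20} // assumed default
	for _, o := range opt {
		o(&opts)
	}
	return &Repository{defaultLimit: opts.withLimit}, nil
}

func main() {
	repo, err := NewRepository("reader", "writer", WithLimit(3))
	fmt.Println(err == nil, repo.defaultLimit) // true 3
}
```

The `*_Limits` tests below exercise exactly this interplay between a repository-level default and per-call overrides.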
assert.NoError(err) require.NotNil(repo) hs := hostSet.clone() hs.Description = tt.name - got, gotHosts, _, err := repo.UpdateSet(ctx, prj.PublicId, hs, hs.Version, []string{"Description"}, tt.updateOpts...) + got, gotHosts, _, err := repo.UpdateSet(context.Background(), prj.PublicId, hs, hs.Version, []string{"Description"}, tt.updateOpts...) require.NoError(err) require.NotNil(got) assert.Len(gotHosts, tt.wantLen) @@ -711,7 +709,6 @@ func TestRepository_UpdateSet_Limits(t *testing.T) { } func TestRepository_LookupSet(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -726,7 +723,7 @@ func TestRepository_LookupSet(t *testing.T) { emptyHostSet := TestSets(t, conn, catalog.PublicId, 1)[0] - hostSetId, err := newHostSetId(ctx) + hostSetId, err := newHostSetId() require.NoError(t, err) tests := []struct { @@ -762,10 +759,10 @@ func TestRepository_LookupSet(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, gotHosts, err := repo.LookupSet(ctx, tt.in) + got, gotHosts, err := repo.LookupSet(context.Background(), tt.in) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) @@ -784,7 +781,6 @@ func TestRepository_LookupSet(t *testing.T) { } func TestRepository_LookupSet_Limits(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -846,7 +842,7 @@ func TestRepository_LookupSet_Limits(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, tt.repoOpts...) assert.NoError(err) require.NotNil(repo) got, gotHosts, err := repo.LookupSet(context.Background(), hostSet.PublicId, tt.lookupOpts...) @@ -858,7 +854,6 @@ func TestRepository_LookupSet_Limits(t *testing.T) { } func TestRepository_ListSets(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -898,10 +893,10 @@ func TestRepository_ListSets(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.ListSets(ctx, tt.in, tt.opts...) + got, err := repo.ListSets(context.Background(), tt.in, tt.opts...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) @@ -918,7 +913,6 @@ func TestRepository_ListSets(t *testing.T) { } func TestRepository_ListSets_Limits(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -978,10 +972,10 @@ func TestRepository_ListSets_Limits(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, tt.repoOpts...) assert.NoError(err) require.NotNil(repo) - got, err := repo.ListSets(ctx, hostSets[0].CatalogId, tt.listOpts...) 
+ got, err := repo.ListSets(context.Background(), hostSets[0].CatalogId, tt.listOpts...) require.NoError(err) assert.Len(got, tt.wantLen) }) @@ -989,7 +983,6 @@ func TestRepository_ListSets_Limits(t *testing.T) { } func TestRepository_DeleteSet(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1000,7 +993,7 @@ func TestRepository_DeleteSet(t *testing.T) { catalog := TestCatalogs(t, conn, prj.PublicId, 1)[0] hostSet := TestSets(t, conn, catalog.PublicId, 1)[0] - newHostSetId, err := newHostSetId(ctx) + newHostSetId, err := newHostSetId() require.NoError(t, err) tests := []struct { name string @@ -1028,10 +1021,10 @@ func TestRepository_DeleteSet(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.DeleteSet(ctx, prj.PublicId, tt.in) + got, err := repo.DeleteSet(context.Background(), prj.PublicId, tt.in) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Zero(got) diff --git a/internal/host/static/repository_host_test.go b/internal/host/static/repository_host_test.go index f082688c305..8b3f06b4ece 100644 --- a/internal/host/static/repository_host_test.go +++ b/internal/host/static/repository_host_test.go @@ -26,7 +26,6 @@ import ( ) func TestRepository_CreateHost(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -164,10 +163,10 @@ func TestRepository_CreateHost(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.CreateHost(ctx, prj.GetPublicId(), tt.in, tt.opts...) + got, err := repo.CreateHost(context.Background(), prj.GetPublicId(), tt.in, tt.opts...) 
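The lookup and delete tests above call ID helpers such as `newHostSetId()` with no context and later assert a type prefix via `assertPublicId(t, globals.StaticHostSetPrefix, ...)`. Below is a small illustrative helper of that shape; the prefix value and the random encoding are assumptions, not the repository's implementation.

```go
package main

import (
	"crypto/rand"
	"encoding/base32"
	"fmt"
	"strings"
)

// staticHostSetPrefix stands in for globals.StaticHostSetPrefix ("hsst" is assumed here).
const staticHostSetPrefix = "hsst"

// newHostSetId returns a prefixed random public ID such as "hsst_...".
// Like the id helpers in this diff, it takes no context: it only builds a string.
func newHostSetId() (string, error) {
	b := make([]byte, 10)
	if _, err := rand.Read(b); err != nil {
		return "", fmt.Errorf("newHostSetId: %w", err)
	}
	enc := base32.StdEncoding.WithPadding(base32.NoPadding)
	return staticHostSetPrefix + "_" + strings.ToLower(enc.EncodeToString(b)), nil
}

func main() {
	id, err := newHostSetId()
	if err != nil {
		panic(err)
	}
	// Mirrors the prefix assertion made by assertPublicId in the tests above.
	fmt.Println(id, strings.HasPrefix(id, staticHostSetPrefix+"_"))
}
```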
if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) @@ -187,7 +186,7 @@ func TestRepository_CreateHost(t *testing.T) { t.Run("invalid-duplicate-names", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -202,7 +201,7 @@ func TestRepository_CreateHost(t *testing.T) { }, } - got, err := repo.CreateHost(ctx, prj.GetPublicId(), in) + got, err := repo.CreateHost(context.Background(), prj.GetPublicId(), in) require.NoError(err) require.NotNil(got) assertPublicId(t, globals.StaticHostPrefix, got.PublicId) @@ -211,14 +210,14 @@ func TestRepository_CreateHost(t *testing.T) { assert.Equal(in.Description, got.Description) assert.Equal(got.CreateTime, got.UpdateTime) - got2, err := repo.CreateHost(ctx, prj.GetPublicId(), in) + got2, err := repo.CreateHost(context.Background(), prj.GetPublicId(), in) assert.Truef(errors.Match(errors.T(errors.NotUnique), err), "want err code: %v got err: %v", errors.NotUnique, err) assert.Nil(got2) }) t.Run("valid-duplicate-names-diff-catalogs", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -236,7 +235,7 @@ func TestRepository_CreateHost(t *testing.T) { in2 := in.clone() in.CatalogId = catalogA.PublicId - got, err := repo.CreateHost(ctx, prj.GetPublicId(), in) + got, err := repo.CreateHost(context.Background(), prj.GetPublicId(), in) require.NoError(err) require.NotNil(got) assertPublicId(t, globals.StaticHostPrefix, got.PublicId) @@ -246,7 +245,7 @@ func TestRepository_CreateHost(t *testing.T) { assert.Equal(got.CreateTime, got.UpdateTime) in2.CatalogId = catalogB.PublicId - got2, err := repo.CreateHost(ctx, prj.GetPublicId(), in2) + got2, err := repo.CreateHost(context.Background(), prj.GetPublicId(), in2) require.NoError(err) require.NotNil(got2) assertPublicId(t, globals.StaticHostPrefix, got2.PublicId) @@ -599,7 +598,7 @@ func TestRepository_UpdateHost(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -607,7 +606,7 @@ func TestRepository_UpdateHost(t *testing.T) { catalog := TestCatalogs(t, conn, prj.PublicId, 1)[0] tt.orig.CatalogId = catalog.PublicId - orig, err := repo.CreateHost(ctx, prj.GetPublicId(), tt.orig) + orig, err := repo.CreateHost(context.Background(), prj.GetPublicId(), tt.orig) assert.NoError(err) require.NotNil(orig) @@ -618,7 +617,7 @@ func TestRepository_UpdateHost(t *testing.T) { if tt.chgFn != nil { orig = tt.chgFn(orig) } - got, gotCount, err := repo.UpdateHost(ctx, prj.GetPublicId(), orig, 1, tt.masks) + got, gotCount, err := repo.UpdateHost(context.Background(), prj.GetPublicId(), orig, 1, tt.masks) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Equal(tt.wantCount, gotCount, "row count") @@ -654,7 +653,7 @@ func TestRepository_UpdateHost(t *testing.T) { t.Run("invalid-duplicate-names", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) 
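The duplicate-name subtests above classify failures with the internal helper `errors.Match(errors.T(errors.NotUnique), err)`. Outside the repository, the nearest standard-library analogue is a sentinel error checked with `errors.Is`; the sketch below uses an invented `ErrNotUnique` and an in-memory uniqueness check purely for illustration.

```go
package main

import (
	"errors"
	"fmt"
)

// ErrNotUnique is an assumed sentinel standing in for the NotUnique error code.
var ErrNotUnique = errors.New("not unique")

// createHost pretends to enforce a unique (catalogId, name) constraint in memory.
func createHost(seen map[string]bool, catalogId, name string) error {
	key := catalogId + "/" + name
	if seen[key] {
		// Wrap the sentinel so callers can classify the failure.
		return fmt.Errorf("create host %q: %w", name, ErrNotUnique)
	}
	seen[key] = true
	return nil
}

func main() {
	seen := map[string]bool{}
	_ = createHost(seen, "hcst_A", "web-1")

	err := createHost(seen, "hcst_A", "web-1") // same name, same catalog
	fmt.Println(errors.Is(err, ErrNotUnique))  // true

	// A duplicate name in a different catalog is fine, mirroring the
	// "valid-duplicate-names-diff-catalogs" cases above.
	fmt.Println(createHost(seen, "hcst_B", "web-1") == nil) // true
}
```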
@@ -666,7 +665,7 @@ func TestRepository_UpdateHost(t *testing.T) { hA, hB := hs[0], hs[1] hA.Name = name - got1, gotCount1, err := repo.UpdateHost(ctx, prj.GetPublicId(), hA, 1, []string{"name"}) + got1, gotCount1, err := repo.UpdateHost(context.Background(), prj.GetPublicId(), hA, 1, []string{"name"}) assert.NoError(err) require.NotNil(got1) assert.Equal(name, got1.Name) @@ -674,7 +673,7 @@ func TestRepository_UpdateHost(t *testing.T) { assert.NoError(db.TestVerifyOplog(t, rw, hA.PublicId, db.WithOperation(oplog.OpType_OP_TYPE_UPDATE), db.WithCreateNotBefore(10*time.Second))) hB.Name = name - got2, gotCount2, err := repo.UpdateHost(ctx, prj.GetPublicId(), hB, 1, []string{"name"}) + got2, gotCount2, err := repo.UpdateHost(context.Background(), prj.GetPublicId(), hB, 1, []string{"name"}) assert.Truef(errors.Match(errors.T(errors.NotUnique), err), "want err code: %v got err: %v", errors.NotUnique, err) assert.Nil(got2) assert.Equal(db.NoRowsAffected, gotCount2, "row count") @@ -685,7 +684,7 @@ func TestRepository_UpdateHost(t *testing.T) { t.Run("valid-duplicate-names-diff-Catalogs", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -703,7 +702,7 @@ func TestRepository_UpdateHost(t *testing.T) { in2 := in.clone() in.CatalogId = catalogA.PublicId - got, err := repo.CreateHost(ctx, prj.GetPublicId(), in) + got, err := repo.CreateHost(context.Background(), prj.GetPublicId(), in) assert.NoError(err) require.NotNil(got) assertPublicId(t, globals.StaticHostPrefix, got.PublicId) @@ -713,11 +712,11 @@ func TestRepository_UpdateHost(t *testing.T) { in2.CatalogId = catalogB.PublicId in2.Name = "first-name" - got2, err := repo.CreateHost(ctx, prj.GetPublicId(), in2) + got2, err := repo.CreateHost(context.Background(), prj.GetPublicId(), in2) assert.NoError(err) require.NotNil(got2) got2.Name = got.Name - got3, gotCount3, err := repo.UpdateHost(ctx, prj.GetPublicId(), got2, 1, []string{"name"}) + got3, gotCount3, err := repo.UpdateHost(context.Background(), prj.GetPublicId(), got2, 1, []string{"name"}) assert.NoError(err) require.NotNil(got3) assert.NotSame(got2, got3) @@ -729,7 +728,7 @@ func TestRepository_UpdateHost(t *testing.T) { t.Run("change-project-id", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -758,7 +757,6 @@ func TestRepository_UpdateHost(t *testing.T) { } func TestRepository_LookupHost(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -769,7 +767,7 @@ func TestRepository_LookupHost(t *testing.T) { catalog := TestCatalogs(t, conn, prj.PublicId, 1)[0] host := TestHosts(t, conn, catalog.PublicId, 1)[0] - hostId, err := newHostId(ctx) + hostId, err := newHostId() require.NoError(t, err) tests := []struct { name string @@ -796,10 +794,10 @@ func TestRepository_LookupHost(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.LookupHost(ctx, tt.in) + got, err := repo.LookupHost(context.Background(), tt.in) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: 
%q", tt.wantIsErr, err) assert.Nil(got) @@ -812,7 +810,6 @@ func TestRepository_LookupHost(t *testing.T) { } func TestRepository_LookupHost_HostSets(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -862,11 +859,10 @@ func TestRepository_LookupHost_HostSets(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.LookupHost(ctx, tt.in) - require.NoError(err) + got, err := repo.LookupHost(context.Background(), tt.in) assert.Empty( cmp.Diff( tt.want, @@ -880,7 +876,6 @@ func TestRepository_LookupHost_HostSets(t *testing.T) { } func TestRepository_ListHosts(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -920,10 +915,10 @@ func TestRepository_ListHosts(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.ListHosts(ctx, tt.in, tt.opts...) + got, err := repo.ListHosts(context.Background(), tt.in, tt.opts...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) @@ -940,7 +935,6 @@ func TestRepository_ListHosts(t *testing.T) { } func TestRepository_ListHosts_HostSets(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -994,10 +988,10 @@ func TestRepository_ListHosts_HostSets(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.ListHosts(ctx, tt.in) + got, err := repo.ListHosts(context.Background(), tt.in) require.NoError(err) assert.Empty( cmp.Diff( @@ -1015,7 +1009,6 @@ func TestRepository_ListHosts_HostSets(t *testing.T) { } func TestRepository_ListHosts_Limits(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1075,10 +1068,10 @@ func TestRepository_ListHosts_Limits(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, tt.repoOpts...) assert.NoError(err) require.NotNil(repo) - got, err := repo.ListHosts(ctx, hosts[0].CatalogId, tt.listOpts...) + got, err := repo.ListHosts(context.Background(), hosts[0].CatalogId, tt.listOpts...) 
require.NoError(err) assert.Len(got, tt.wantLen) }) @@ -1086,7 +1079,6 @@ func TestRepository_ListHosts_Limits(t *testing.T) { } func TestRepository_DeleteHost(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1097,7 +1089,7 @@ func TestRepository_DeleteHost(t *testing.T) { catalog := TestCatalogs(t, conn, prj.PublicId, 1)[0] host := TestHosts(t, conn, catalog.PublicId, 1)[0] - newHostId, err := newHostId(ctx) + newHostId, err := newHostId() require.NoError(t, err) tests := []struct { name string @@ -1125,10 +1117,10 @@ func TestRepository_DeleteHost(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.DeleteHost(ctx, catalog.ProjectId, tt.in) + got, err := repo.DeleteHost(context.Background(), catalog.ProjectId, tt.in) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Zero(got) diff --git a/internal/host/static/repository_test.go b/internal/host/static/repository_test.go index 61965aa77ae..16715b5cb40 100644 --- a/internal/host/static/repository_test.go +++ b/internal/host/static/repository_test.go @@ -4,7 +4,6 @@ package static import ( - "context" "testing" "github.com/stretchr/testify/assert" @@ -16,7 +15,6 @@ import ( ) func TestRepository_New(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -109,7 +107,7 @@ func TestRepository_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewRepository(ctx, tt.args.r, tt.args.w, tt.args.kms, tt.args.opts...) + got, err := NewRepository(tt.args.r, tt.args.w, tt.args.kms, tt.args.opts...) if tt.wantIsErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantIsErr), err), "want err: %q got: %q", tt.wantIsErr, err) assert.Nil(got) diff --git a/internal/host/static/testing.go b/internal/host/static/testing.go index 94a673923d7..2c524e90b61 100644 --- a/internal/host/static/testing.go +++ b/internal/host/static/testing.go @@ -17,20 +17,19 @@ import ( // the host catalog, the test will fail. func TestCatalogs(t testing.TB, conn *db.DB, projectId string, count int) []*HostCatalog { t.Helper() - ctx := context.Background() assert := assert.New(t) var cats []*HostCatalog for i := 0; i < count; i++ { - cat, err := NewHostCatalog(ctx, projectId) + cat, err := NewHostCatalog(projectId) assert.NoError(err) assert.NotNil(cat) - id, err := newHostCatalogId(ctx) + id, err := newHostCatalogId() assert.NoError(err) assert.NotEmpty(id) cat.PublicId = id w := db.New(conn) - err2 := w.Create(ctx, cat) + err2 := w.Create(context.Background(), cat) assert.NoError(err2) cats = append(cats, cat) } @@ -42,22 +41,21 @@ func TestCatalogs(t testing.TB, conn *db.DB, projectId string, count int) []*Hos // If any errors are encountered during the creation of the host, the test will fail. 
func TestHosts(t testing.TB, conn *db.DB, catalogId string, count int) []*Host { t.Helper() - ctx := context.Background() assert := assert.New(t) var hosts []*Host for i := 0; i < count; i++ { - host, err := NewHost(ctx, catalogId, WithAddress(fmt.Sprintf("%s-%d", catalogId, i))) + host, err := NewHost(catalogId, WithAddress(fmt.Sprintf("%s-%d", catalogId, i))) assert.NoError(err) assert.NotNil(host) - id, err := newHostId(ctx) + id, err := newHostId() assert.NoError(err) assert.NotEmpty(id) host.PublicId = id w := db.New(conn) - err2 := w.Create(ctx, host) + err2 := w.Create(context.Background(), host) assert.NoError(err2) hosts = append(hosts, host) } @@ -69,21 +67,20 @@ func TestHosts(t testing.TB, conn *db.DB, catalogId string, count int) []*Host { // previously. The test will fail if any errors are encountered. func TestSets(t testing.TB, conn *db.DB, catalogId string, count int) []*HostSet { t.Helper() - ctx := context.Background() assert := assert.New(t) var sets []*HostSet for i := 0; i < count; i++ { - set, err := NewHostSet(ctx, catalogId) + set, err := NewHostSet(catalogId) assert.NoError(err) assert.NotNil(set) - id, err := newHostSetId(ctx) + id, err := newHostSetId() assert.NoError(err) assert.NotEmpty(id) set.PublicId = id w := db.New(conn) - err2 := w.Create(ctx, set) + err2 := w.Create(context.Background(), set) assert.NoError(err2) sets = append(sets, set) } @@ -96,16 +93,15 @@ func TestSets(t testing.TB, conn *db.DB, catalogId string, count int) []*HostSet func TestSetMembers(t testing.TB, conn *db.DB, setId string, hosts []*Host) []*HostSetMember { t.Helper() assert := assert.New(t) - ctx := context.Background() var members []*HostSetMember for _, host := range hosts { - member, err := NewHostSetMember(ctx, setId, host.PublicId) + member, err := NewHostSetMember(setId, host.PublicId) assert.NoError(err) assert.NotNil(member) w := db.New(conn) - err2 := w.Create(ctx, member) + err2 := w.Create(context.Background(), member) assert.NoError(err2) members = append(members, member) } diff --git a/internal/iam/group.go b/internal/iam/group.go index 96ab166615b..0b9a03c38ac 100644 --- a/internal/iam/group.go +++ b/internal/iam/group.go @@ -34,10 +34,10 @@ var ( // NewGroup creates a new in memory group with a scope (project/org) // and allowed options include: withDescripion, WithName. -func NewGroup(ctx context.Context, scopeId string, opt ...Option) (*Group, error) { +func NewGroup(scopeId string, opt ...Option) (*Group, error) { const op = "iam.NewGroup" if scopeId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing scope id") } opts := getOpts(opt...) g := &Group{ diff --git a/internal/iam/group_member.go b/internal/iam/group_member.go index 09e4547558d..fbc9994769c 100644 --- a/internal/iam/group_member.go +++ b/internal/iam/group_member.go @@ -73,13 +73,13 @@ var ( // NewGroupMemberUser creates a new in memory user member of the group. No // options are currently supported. 
-func NewGroupMemberUser(ctx context.Context, groupId, userId string, _ ...Option) (*GroupMemberUser, error) { +func NewGroupMemberUser(groupId, userId string, _ ...Option) (*GroupMemberUser, error) { const op = "iam.NewGroupMemberUser" if groupId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing group id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing group id") } if userId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing user id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing user id") } return &GroupMemberUser{ GroupMemberUser: &store.GroupMemberUser{ diff --git a/internal/iam/group_member_test.go b/internal/iam/group_member_test.go index a9cdca82c78..5cd1174eb20 100644 --- a/internal/iam/group_member_test.go +++ b/internal/iam/group_member_test.go @@ -19,7 +19,6 @@ import ( func Test_NewGroupMember(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -105,7 +104,7 @@ func Test_NewGroupMember(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewGroupMemberUser(ctx, tt.args.groupId, tt.args.userId, tt.args.opt...) + got, err := NewGroupMemberUser(tt.args.groupId, tt.args.userId, tt.args.opt...) if tt.wantErr { require.Error(err) assert.True(errors.Match(errors.T(tt.wantIsErr), err)) @@ -119,7 +118,6 @@ func Test_NewGroupMember(t *testing.T) { func Test_GroupMemberCreate(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -141,7 +139,7 @@ func Test_GroupMemberCreate(t *testing.T) { gm: func() *GroupMemberUser { g := TestGroup(t, conn, org.PublicId) u := TestUser(t, repo, org.PublicId) - gm, err := NewGroupMemberUser(ctx, g.PublicId, u.PublicId) + gm, err := NewGroupMemberUser(g.PublicId, u.PublicId) require.NoError(t, err) return gm }(), @@ -154,7 +152,7 @@ func Test_GroupMemberCreate(t *testing.T) { gm: func() *GroupMemberUser { g := TestGroup(t, conn, proj.PublicId) u := TestUser(t, repo, org.PublicId) - gm, err := NewGroupMemberUser(ctx, g.PublicId, u.PublicId) + gm, err := NewGroupMemberUser(g.PublicId, u.PublicId) require.NoError(t, err) return gm }(), @@ -167,7 +165,7 @@ func Test_GroupMemberCreate(t *testing.T) { gm: func() *GroupMemberUser { id := testId(t) u := TestUser(t, repo, org.PublicId) - gm, err := NewGroupMemberUser(ctx, id, u.PublicId) + gm, err := NewGroupMemberUser(id, u.PublicId) require.NoError(t, err) return gm }(), @@ -181,7 +179,7 @@ func Test_GroupMemberCreate(t *testing.T) { gm: func() *GroupMemberUser { id := testId(t) g := TestGroup(t, conn, proj.PublicId) - gm, err := NewGroupMemberUser(ctx, g.PublicId, id) + gm, err := NewGroupMemberUser(g.PublicId, id) require.NoError(t, err) return gm }(), @@ -227,7 +225,7 @@ func Test_GroupMemberCreate(t *testing.T) { gm: func() *GroupMemberUser { g := TestGroup(t, conn, org.PublicId) u := TestUser(t, repo, org.PublicId) - gm, err := NewGroupMemberUser(ctx, g.PublicId, u.PublicId) + gm, err := NewGroupMemberUser(g.PublicId, u.PublicId) require.NoError(t, err) return gm }(), diff --git a/internal/iam/group_test.go b/internal/iam/group_test.go index 70779855ae4..de131e42489 100644 --- a/internal/iam/group_test.go +++ b/internal/iam/group_test.go @@ -24,7 +24,6 @@ import ( func TestNewGroup(t *testing.T) { t.Parallel() - ctx := 
context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -84,7 +83,7 @@ func TestNewGroup(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewGroup(ctx, tt.args.scopePublicId, tt.args.opt...) + got, err := NewGroup(tt.args.scopePublicId, tt.args.opt...) if tt.wantErr { require.Error(err) assert.Contains(err.Error(), tt.wantErrMsg) @@ -101,7 +100,6 @@ func TestNewGroup(t *testing.T) { func Test_GroupCreate(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -122,9 +120,9 @@ func Test_GroupCreate(t *testing.T) { args: args{ group: func() *Group { id := testId(t) - grp, err := NewGroup(ctx, org.PublicId, WithName(id), WithDescription("description-"+id)) + grp, err := NewGroup(org.PublicId, WithName(id), WithDescription("description-"+id)) require.NoError(t, err) - grpId, err := newGroupId(ctx) + grpId, err := newGroupId() require.NoError(t, err) grp.PublicId = grpId return grp @@ -137,9 +135,9 @@ func Test_GroupCreate(t *testing.T) { args: args{ group: func() *Group { id := testId(t) - grp, err := NewGroup(ctx, proj.PublicId, WithName(id), WithDescription("description-"+id)) + grp, err := NewGroup(proj.PublicId, WithName(id), WithDescription("description-"+id)) require.NoError(t, err) - grpId, err := newGroupId(ctx) + grpId, err := newGroupId() require.NoError(t, err) grp.PublicId = grpId return grp @@ -151,9 +149,9 @@ func Test_GroupCreate(t *testing.T) { name: "valid-with-dup-null-names-and-descriptions", args: args{ group: func() *Group { - grp, err := NewGroup(ctx, org.PublicId) + grp, err := NewGroup(org.PublicId) require.NoError(t, err) - grpId, err := newGroupId(ctx) + grpId, err := newGroupId() require.NoError(t, err) grp.PublicId = grpId return grp @@ -167,9 +165,9 @@ func Test_GroupCreate(t *testing.T) { args: args{ group: func() *Group { id := testId(t) - grp, err := NewGroup(ctx, id) + grp, err := NewGroup(id) require.NoError(t, err) - grpId, err := newGroupId(ctx) + grpId, err := newGroupId() require.NoError(t, err) grp.PublicId = grpId return grp @@ -186,14 +184,14 @@ func Test_GroupCreate(t *testing.T) { w := db.New(conn) if tt.wantDup { g := tt.args.group.Clone().(*Group) - grpId, err := newGroupId(ctx) + grpId, err := newGroupId() require.NoError(err) g.PublicId = grpId - err = w.Create(ctx, g) + err = w.Create(context.Background(), g) require.NoError(err) } g := tt.args.group.Clone().(*Group) - err := w.Create(ctx, g) + err := w.Create(context.Background(), g) if tt.wantErr { require.Error(err) assert.Contains(err.Error(), tt.wantErrMsg) @@ -204,7 +202,7 @@ func Test_GroupCreate(t *testing.T) { foundGrp := allocGroup() foundGrp.PublicId = tt.args.group.PublicId - err = w.LookupByPublicId(ctx, &foundGrp) + err = w.LookupByPublicId(context.Background(), &foundGrp) require.NoError(err) assert.Empty(cmp.Diff(g, &foundGrp, protocmp.Transform())) }) diff --git a/internal/iam/ids.go b/internal/iam/ids.go index 8abc83abaf0..202523d6f70 100644 --- a/internal/iam/ids.go +++ b/internal/iam/ids.go @@ -4,7 +4,6 @@ package iam import ( - "context" "fmt" "github.com/hashicorp/boundary/globals" @@ -18,38 +17,38 @@ const ( RoleGrantPrefix = "rg" ) -func newRoleId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.RolePrefix) +func newRoleId() (string, error) { + id, err := 
db.NewPublicId(globals.RolePrefix) if err != nil { - return "", errors.Wrap(ctx, err, "iam.newRoleId") + return "", errors.WrapDeprecated(err, "iam.newRoleId") } return id, nil } -func newUserId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.UserPrefix) +func newUserId() (string, error) { + id, err := db.NewPublicId(globals.UserPrefix) if err != nil { - return "", errors.Wrap(ctx, err, "iam.newUserId") + return "", errors.WrapDeprecated(err, "iam.newUserId") } return id, nil } -func newGroupId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.GroupPrefix) +func newGroupId() (string, error) { + id, err := db.NewPublicId(globals.GroupPrefix) if err != nil { - return "", errors.Wrap(ctx, err, "iam.newGroupId") + return "", errors.WrapDeprecated(err, "iam.newGroupId") } return id, nil } -func newScopeId(ctx context.Context, scopeType scope.Type) (string, error) { +func newScopeId(scopeType scope.Type) (string, error) { const op = "iam.newScopeId" if scopeType == scope.Unknown { - return "", errors.New(ctx, errors.InvalidParameter, op, "unknown scope is not supported") + return "", errors.NewDeprecated(errors.InvalidParameter, op, "unknown scope is not supported") } - id, err := db.NewPublicId(ctx, scopeType.Prefix()) + id, err := db.NewPublicId(scopeType.Prefix()) if err != nil { - return "", errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("scope type: %s", scopeType.String()))) + return "", errors.WrapDeprecated(err, op, errors.WithMsg(fmt.Sprintf("scope type: %s", scopeType.String()))) } return id, nil } diff --git a/internal/iam/ids_test.go b/internal/iam/ids_test.go index 589c6ba39cc..7ace038aeef 100644 --- a/internal/iam/ids_test.go +++ b/internal/iam/ids_test.go @@ -4,7 +4,6 @@ package iam import ( - "context" "strings" "testing" @@ -16,19 +15,18 @@ import ( ) func Test_PublicIds(t *testing.T) { - ctx := context.Background() t.Run("role", func(t *testing.T) { - id, err := newRoleId(ctx) + id, err := newRoleId() require.NoError(t, err) assert.True(t, strings.HasPrefix(id, globals.RolePrefix+"_")) }) t.Run("user", func(t *testing.T) { - id, err := newUserId(ctx) + id, err := newUserId() require.NoError(t, err) assert.True(t, strings.HasPrefix(id, globals.UserPrefix+"_")) }) t.Run("group", func(t *testing.T) { - id, err := newGroupId(ctx) + id, err := newGroupId() require.NoError(t, err) assert.True(t, strings.HasPrefix(id, globals.GroupPrefix+"_")) }) @@ -36,15 +34,15 @@ func Test_PublicIds(t *testing.T) { assert.True(t, strings.HasPrefix("mgoidc_1234567890", globals.OidcManagedGroupPrefix+"_")) }) t.Run("scopes", func(t *testing.T) { - id, err := newScopeId(ctx, scope.Org) + id, err := newScopeId(scope.Org) require.NoError(t, err) assert.True(t, strings.HasPrefix(id, scope.Org.Prefix())) - id, err = newScopeId(ctx, scope.Project) + id, err = newScopeId(scope.Project) require.NoError(t, err) assert.True(t, strings.HasPrefix(id, scope.Project.Prefix())) - id, err = newScopeId(ctx, scope.Unknown) + id, err = newScopeId(scope.Unknown) require.Error(t, err) assert.Empty(t, id) assert.True(t, errors.Match(errors.T(errors.InvalidParameter), err)) diff --git a/internal/iam/principal_role.go b/internal/iam/principal_role.go index 2723547b283..354f990ee47 100644 --- a/internal/iam/principal_role.go +++ b/internal/iam/principal_role.go @@ -80,13 +80,13 @@ var ( // NewUserRole creates a new user role in memory. No options are supported // currently. 
-func NewUserRole(ctx context.Context, roleId, userId string, _ ...Option) (*UserRole, error) { +func NewUserRole(roleId, userId string, _ ...Option) (*UserRole, error) { const op = "iam.NewUserRole" if roleId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing role id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing role id") } if userId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing user id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing user id") } return &UserRole{ UserRole: &store.UserRole{ @@ -157,13 +157,13 @@ var ( // NewGroupRole creates a new group role in memory. No options are supported // currently. -func NewGroupRole(ctx context.Context, roleId, groupId string, opt ...Option) (*GroupRole, error) { +func NewGroupRole(roleId, groupId string, opt ...Option) (*GroupRole, error) { const op = "iam.NewGroupRole" if roleId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing role id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing role id") } if groupId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing group id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing group id") } return &GroupRole{ GroupRole: &store.GroupRole{ @@ -234,13 +234,13 @@ var ( // NewGroupRole creates a new group role in memory. No options are supported // currently. -func NewManagedGroupRole(ctx context.Context, roleId, managedGroupId string, opt ...Option) (*ManagedGroupRole, error) { +func NewManagedGroupRole(roleId, managedGroupId string, opt ...Option) (*ManagedGroupRole, error) { const op = "iam.NewManagedGroupRole" if roleId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing role id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing role id") } if managedGroupId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing managed group id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing managed group id") } return &ManagedGroupRole{ ManagedGroupRole: &store.ManagedGroupRole{ diff --git a/internal/iam/principal_role_ext_test.go b/internal/iam/principal_role_ext_test.go index c8beacc6dd6..3d9975ade19 100644 --- a/internal/iam/principal_role_ext_test.go +++ b/internal/iam/principal_role_ext_test.go @@ -26,7 +26,6 @@ import ( func TestNewManagedGroupRole(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := iam.TestRepo(t, conn, wrapper) @@ -83,7 +82,7 @@ func TestNewManagedGroupRole(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := iam.NewManagedGroupRole(ctx, tt.args.roleId, tt.args.ManagedGroupId, tt.args.opt...) + got, err := iam.NewManagedGroupRole(tt.args.roleId, tt.args.ManagedGroupId, tt.args.opt...) 
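The `iam` constructors above (`NewUserRole`, `NewGroupRole`, `NewManagedGroupRole`) validate their required IDs and report failures through the context-free `errors.NewDeprecated(...)` helper. Here is a stand-alone sketch of that validate-then-build shape using plain standard-library errors; the struct, the `op` string, and the messages are illustrative only.

```go
package main

import (
	"errors"
	"fmt"
)

var ErrInvalidParameter = errors.New("invalid parameter")

// ManagedGroupRole is a minimal stand-in for the iam association type.
type ManagedGroupRole struct {
	RoleId         string
	ManagedGroupId string
}

// NewManagedGroupRole builds the association in memory, rejecting missing IDs.
// Nothing here touches the database, so no context is required.
func NewManagedGroupRole(roleId, managedGroupId string) (*ManagedGroupRole, error) {
	const op = "example.NewManagedGroupRole"
	if roleId == "" {
		return nil, fmt.Errorf("%s: missing role id: %w", op, ErrInvalidParameter)
	}
	if managedGroupId == "" {
		return nil, fmt.Errorf("%s: missing managed group id: %w", op, ErrInvalidParameter)
	}
	return &ManagedGroupRole{RoleId: roleId, ManagedGroupId: managedGroupId}, nil
}

func main() {
	if _, err := NewManagedGroupRole("", "mgoidc_abc"); errors.Is(err, ErrInvalidParameter) {
		fmt.Println("rejected:", err)
	}
	mgr, _ := NewManagedGroupRole("r_abc", "mgoidc_abc")
	fmt.Printf("%+v\n", mgr)
}
```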
if tt.wantErr { require.Error(err) assert.True(errors.Match(errors.T(tt.wantIsErr), err)) @@ -136,7 +135,7 @@ func TestManagedGroupRole_Create(t *testing.T) { args: args{ role: func() *iam.ManagedGroupRole { role := iam.TestRole(t, conn, org.PublicId) - principalRole, err := iam.NewManagedGroupRole(ctx, role.PublicId, mg.PublicId) + principalRole, err := iam.NewManagedGroupRole(role.PublicId, mg.PublicId) require.NoError(t, err) return principalRole }(), @@ -148,7 +147,7 @@ func TestManagedGroupRole_Create(t *testing.T) { args: args{ role: func() *iam.ManagedGroupRole { role := iam.TestRole(t, conn, org2.PublicId) - principalRole, err := iam.NewManagedGroupRole(ctx, role.PublicId, mg.PublicId) + principalRole, err := iam.NewManagedGroupRole(role.PublicId, mg.PublicId) require.NoError(t, err) return principalRole }(), @@ -159,7 +158,7 @@ func TestManagedGroupRole_Create(t *testing.T) { args: args{ role: func() *iam.ManagedGroupRole { id := testId(t) - principalRole, err := iam.NewManagedGroupRole(ctx, id, mg.PublicId) + principalRole, err := iam.NewManagedGroupRole(id, mg.PublicId) require.NoError(t, err) return principalRole }(), @@ -173,7 +172,7 @@ func TestManagedGroupRole_Create(t *testing.T) { role: func() *iam.ManagedGroupRole { id := testId(t) role := iam.TestRole(t, conn, org.PublicId) - principalRole, err := iam.NewManagedGroupRole(ctx, role.PublicId, id) + principalRole, err := iam.NewManagedGroupRole(role.PublicId, id) require.NoError(t, err) return principalRole }(), @@ -219,7 +218,7 @@ func TestManagedGroupRole_Create(t *testing.T) { args: args{ role: func() *iam.ManagedGroupRole { role := iam.TestRole(t, conn, org.PublicId) - principalRole, err := iam.NewManagedGroupRole(ctx, role.PublicId, mg.PublicId) + principalRole, err := iam.NewManagedGroupRole(role.PublicId, mg.PublicId) require.NoError(t, err) return principalRole }(), @@ -373,19 +372,18 @@ func TestManagedGroupRole_Delete(t *testing.T) { func TestManagedGroupRole_Clone(t *testing.T) { t.Parallel() - ctx := context.Background() t.Run("valid", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - mgr, err := iam.NewManagedGroupRole(ctx, "r_abc", "mgoidc_abc") + mgr, err := iam.NewManagedGroupRole("r_abc", "mgoidc_abc") require.NoError(err) cp := mgr.Clone() assert.True(proto.Equal(cp.(*iam.ManagedGroupRole).ManagedGroupRole, mgr.ManagedGroupRole)) }) t.Run("not-equal", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - mgr, err := iam.NewManagedGroupRole(ctx, "r_abc", "mgoidc_abc") + mgr, err := iam.NewManagedGroupRole("r_abc", "mgoidc_abc") require.NoError(err) - mgr2, err := iam.NewManagedGroupRole(ctx, "r_xyz", "mgoidc_xyz") + mgr2, err := iam.NewManagedGroupRole("r_xyz", "mgoidc_xyz") require.NoError(err) cp := mgr.Clone() diff --git a/internal/iam/principal_role_test.go b/internal/iam/principal_role_test.go index 8a1331f2b07..8c1564ca7df 100644 --- a/internal/iam/principal_role_test.go +++ b/internal/iam/principal_role_test.go @@ -19,7 +19,6 @@ import ( func TestNewUserRole(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -90,7 +89,7 @@ func TestNewUserRole(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewUserRole(ctx, tt.args.roleId, tt.args.userId, tt.args.opt...) + got, err := NewUserRole(tt.args.roleId, tt.args.userId, tt.args.opt...) 
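The `*_Create` tests above build each fixture with an immediately invoked function literal (`role: func() *iam.ManagedGroupRole { ... }()`), so construction errors are checked with `require` right where the case is declared. A generic sketch of that idiom; the `userRole` type and `newUserRole` helper are invented for illustration.

```go
package example

import (
	"testing"

	"github.com/stretchr/testify/require"
)

type userRole struct {
	roleId, userId string
}

func newUserRole(roleId, userId string) (*userRole, error) {
	return &userRole{roleId: roleId, userId: userId}, nil
}

func TestFixtureClosures(t *testing.T) {
	tests := []struct {
		name string
		role *userRole
	}{
		{
			name: "valid",
			// The closure runs immediately, so the fixture is fully built
			// (and its construction error checked) before any subtest runs.
			role: func() *userRole {
				ur, err := newUserRole("r_1234567890", "u_1234567890")
				require.NoError(t, err)
				return ur
			}(),
		},
	}
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			require.NotNil(t, tt.role)
		})
	}
}
```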
if tt.wantErr { require.Error(err) assert.True(errors.Match(errors.T(tt.wantIsErr), err)) @@ -104,7 +103,6 @@ func TestNewUserRole(t *testing.T) { func TestUserRole_Create(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -127,7 +125,7 @@ func TestUserRole_Create(t *testing.T) { role: func() *UserRole { role := TestRole(t, conn, org.PublicId) principal := TestUser(t, repo, org.PublicId) - principalRole, err := NewUserRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewUserRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -140,7 +138,7 @@ func TestUserRole_Create(t *testing.T) { role: func() *UserRole { role := TestRole(t, conn, proj.PublicId) principal := TestUser(t, repo, org.PublicId) - principalRole, err := NewUserRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewUserRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -153,7 +151,7 @@ func TestUserRole_Create(t *testing.T) { role: func() *UserRole { role := TestRole(t, conn, org2.PublicId) principal := TestUser(t, repo, org.PublicId) - principalRole, err := NewUserRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewUserRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -165,7 +163,7 @@ func TestUserRole_Create(t *testing.T) { role: func() *UserRole { role := TestRole(t, conn, proj2.PublicId) principal := TestUser(t, repo, org.PublicId) - principalRole, err := NewUserRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewUserRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -177,7 +175,7 @@ func TestUserRole_Create(t *testing.T) { role: func() *UserRole { id := testId(t) principal := TestUser(t, repo, org.PublicId) - principalRole, err := NewUserRole(ctx, id, principal.PublicId) + principalRole, err := NewUserRole(id, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -191,7 +189,7 @@ func TestUserRole_Create(t *testing.T) { role: func() *UserRole { id := testId(t) role := TestRole(t, conn, proj.PublicId) - principalRole, err := NewUserRole(ctx, role.PublicId, id) + principalRole, err := NewUserRole(role.PublicId, id) require.NoError(t, err) return principalRole }(), @@ -239,7 +237,7 @@ func TestUserRole_Create(t *testing.T) { role: func() *UserRole { role := TestRole(t, conn, org.PublicId) principal := TestUser(t, repo, org.PublicId) - principalRole, err := NewUserRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewUserRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -382,7 +380,6 @@ func TestUserRole_Clone(t *testing.T) { func TestNewGroupRole(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -453,7 +450,7 @@ func TestNewGroupRole(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewGroupRole(ctx, tt.args.roleId, tt.args.groupId, tt.args.opt...) + got, err := NewGroupRole(tt.args.roleId, tt.args.groupId, tt.args.opt...) 
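The `*_Clone` tests and the `cmp.Diff(...)` assertions used throughout these files compare a copy against its original. Below is a minimal clone-then-diff illustration on a plain struct with `github.com/google/go-cmp/cmp`; the `GroupRole` shown here is a simplified stand-in, not the generated store type that the tests compare with `protocmp.Transform()`.

```go
package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
)

// GroupRole is a simplified stand-in for the generated store type.
type GroupRole struct {
	RoleId  string
	GroupId string
}

// Clone returns a copy so callers can mutate the result safely.
func (g *GroupRole) Clone() *GroupRole {
	cp := *g
	return &cp
}

func main() {
	orig := &GroupRole{RoleId: "r_abc", GroupId: "g_abc"}
	cp := orig.Clone()

	// An empty diff means the clone is field-for-field equal to the original.
	fmt.Println(cmp.Diff(orig, cp) == "") // true

	cp.GroupId = "g_xyz"
	fmt.Println(cmp.Diff(orig, cp) != "") // true: the diff reports the changed field
}
```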
if tt.wantErr { require.Error(err) assert.True(errors.Match(errors.T(tt.wantIsErr), err)) @@ -467,7 +464,6 @@ func TestNewGroupRole(t *testing.T) { func TestGroupRole_Create(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -490,7 +486,7 @@ func TestGroupRole_Create(t *testing.T) { role: func() *GroupRole { role := TestRole(t, conn, org.PublicId) principal := TestGroup(t, conn, org.PublicId) - principalRole, err := NewGroupRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewGroupRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -503,7 +499,7 @@ func TestGroupRole_Create(t *testing.T) { role: func() *GroupRole { role := TestRole(t, conn, proj.PublicId) principal := TestGroup(t, conn, proj.PublicId) - principalRole, err := NewGroupRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewGroupRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -516,7 +512,7 @@ func TestGroupRole_Create(t *testing.T) { role: func() *GroupRole { role := TestRole(t, conn, org2.PublicId) principal := TestGroup(t, conn, org.PublicId) - principalRole, err := NewGroupRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewGroupRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -528,7 +524,7 @@ func TestGroupRole_Create(t *testing.T) { role: func() *GroupRole { role := TestRole(t, conn, proj2.PublicId) principal := TestGroup(t, conn, org.PublicId) - principalRole, err := NewGroupRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewGroupRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -540,7 +536,7 @@ func TestGroupRole_Create(t *testing.T) { role: func() *GroupRole { id := testId(t) principal := TestGroup(t, conn, org.PublicId) - principalRole, err := NewGroupRole(ctx, id, principal.PublicId) + principalRole, err := NewGroupRole(id, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -554,7 +550,7 @@ func TestGroupRole_Create(t *testing.T) { role: func() *GroupRole { id := testId(t) role := TestRole(t, conn, proj.PublicId) - principalRole, err := NewGroupRole(ctx, role.PublicId, id) + principalRole, err := NewGroupRole(role.PublicId, id) require.NoError(t, err) return principalRole }(), @@ -602,7 +598,7 @@ func TestGroupRole_Create(t *testing.T) { role: func() *GroupRole { role := TestRole(t, conn, org.PublicId) principal := TestGroup(t, conn, org.PublicId) - principalRole, err := NewGroupRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewGroupRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), @@ -617,7 +613,7 @@ func TestGroupRole_Create(t *testing.T) { role: func() *GroupRole { role := TestRole(t, conn, proj.PublicId) principal := TestGroup(t, conn, proj.PublicId) - principalRole, err := NewGroupRole(ctx, role.PublicId, principal.PublicId) + principalRole, err := NewGroupRole(role.PublicId, principal.PublicId) require.NoError(t, err) return principalRole }(), diff --git a/internal/iam/repository.go b/internal/iam/repository.go index 8afe7923ef8..689e1322ee6 100644 --- a/internal/iam/repository.go +++ b/internal/iam/repository.go @@ -15,7 +15,7 @@ import ( "github.com/hashicorp/boundary/internal/types/scope" ) -var ErrMetadataScopeNotFound = errors.New(context.Background(), 
errors.RecordNotFound, "iam", "scope not found for metadata", errors.WithoutEvent()) +var ErrMetadataScopeNotFound = errors.NewDeprecated(errors.RecordNotFound, "iam", "scope not found for metadata", errors.WithoutEvent()) // Repository is the iam database repository type Repository struct { @@ -29,16 +29,16 @@ type Repository struct { // NewRepository creates a new iam Repository. Supports the options: WithLimit // which sets a default limit on results returned by repo operations. -func NewRepository(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { +func NewRepository(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { const op = "iam.NewRepository" if r == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil reader") } if w == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil writer") } if kms == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil kms") } opts := getOpts(opt...) if opts.withLimit == 0 { diff --git a/internal/iam/repository_group.go b/internal/iam/repository_group.go index c84fe2897b9..c5830c152a9 100644 --- a/internal/iam/repository_group.go +++ b/internal/iam/repository_group.go @@ -31,7 +31,7 @@ func (r *Repository) CreateGroup(ctx context.Context, group *Group, _ ...Option) if group.ScopeId == "" { return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope id") } - id, err := newGroupId(ctx) + id, err := newGroupId() if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -97,7 +97,7 @@ func (r *Repository) UpdateGroup(ctx context.Context, group *Group, version uint if err != nil { return errors.Wrap(ctx, err, op) } - repo, err := NewRepository(ctx, read, w, r.kms) + repo, err := NewRepository(read, w, r.kms) if err != nil { return errors.Wrap(ctx, err, op) } @@ -135,7 +135,7 @@ func (r *Repository) LookupGroup(ctx context.Context, withPublicId string, _ ... 
if err := read.LookupByPublicId(ctx, &g); err != nil { return errors.Wrap(ctx, err, op) } - repo, err := NewRepository(ctx, read, w, r.kms) + repo, err := NewRepository(read, w, r.kms) if err != nil { return errors.Wrap(ctx, err, op) } @@ -224,7 +224,7 @@ func (r *Repository) AddGroupMembers(ctx context.Context, groupId string, groupV newGroupMembers := make([]any, 0, len(userIds)) for _, id := range userIds { - gm, err := NewGroupMemberUser(ctx, groupId, id) + gm, err := NewGroupMemberUser(groupId, id) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory group member")) } @@ -317,7 +317,7 @@ func (r *Repository) DeleteGroupMembers(ctx context.Context, groupId string, gro deleteMembers := make([]any, 0, len(userIds)) for _, id := range userIds { - member, err := NewGroupMemberUser(ctx, groupId, id) + member, err := NewGroupMemberUser(groupId, id) if err != nil { return db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory group member")) } @@ -542,13 +542,13 @@ func groupMemberChanges(ctx context.Context, reader db.Reader, groupId string, u } switch c.Action { case "add": - gm, err := NewGroupMemberUser(ctx, groupId, c.MemberId) + gm, err := NewGroupMemberUser(groupId, c.MemberId) if err != nil { return nil, nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory group member for add")) } addMembers = append(addMembers, gm) case "delete": - gm, err := NewGroupMemberUser(ctx, groupId, c.MemberId) + gm, err := NewGroupMemberUser(groupId, c.MemberId) if err != nil { return nil, nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory group member for delete")) } diff --git a/internal/iam/repository_group_test.go b/internal/iam/repository_group_test.go index 4974044be15..0d019273c5b 100644 --- a/internal/iam/repository_group_test.go +++ b/internal/iam/repository_group_test.go @@ -22,7 +22,6 @@ import ( func TestRepository_CreateGroup(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -47,7 +46,7 @@ func TestRepository_CreateGroup(t *testing.T) { name: "valid-org", args: args{ group: func() *Group { - g, err := NewGroup(ctx, org.PublicId, WithName("valid-org"+id), WithDescription(id)) + g, err := NewGroup(org.PublicId, WithName("valid-org"+id), WithDescription(id)) assert.NoError(t, err) return g }(), @@ -58,7 +57,7 @@ func TestRepository_CreateGroup(t *testing.T) { name: "valid-proj", args: args{ group: func() *Group { - g, err := NewGroup(ctx, proj.PublicId, WithName("valid-proj"+id), WithDescription(id)) + g, err := NewGroup(proj.PublicId, WithName("valid-proj"+id), WithDescription(id)) assert.NoError(t, err) return g }(), @@ -69,7 +68,7 @@ func TestRepository_CreateGroup(t *testing.T) { name: "bad-public-id", args: args{ group: func() *Group { - g, err := NewGroup(ctx, proj.PublicId, WithName("valid-proj"+id), WithDescription(id)) + g, err := NewGroup(proj.PublicId, WithName("valid-proj"+id), WithDescription(id)) assert.NoError(t, err) g.PublicId = id return g @@ -105,7 +104,7 @@ func TestRepository_CreateGroup(t *testing.T) { name: "bad-scope-id", args: args{ group: func() *Group { - g, err := NewGroup(ctx, id) + g, err := NewGroup(id) assert.NoError(t, err) return g }(), @@ -118,7 +117,7 @@ func TestRepository_CreateGroup(t *testing.T) { name: "dup-name", args: args{ group: func() *Group { - g, err := NewGroup(ctx, org.PublicId, WithName("dup-name"+id), WithDescription(id)) + g, 
err := NewGroup(org.PublicId, WithName("dup-name"+id), WithDescription(id)) assert.NoError(t, err) return g }(), @@ -133,7 +132,7 @@ func TestRepository_CreateGroup(t *testing.T) { name: "dup-name-but-diff-scope", args: args{ group: func() *Group { - g, err := NewGroup(ctx, proj.PublicId, WithName("dup-name-but-diff-scope"+id), WithDescription(id)) + g, err := NewGroup(proj.PublicId, WithName("dup-name-but-diff-scope"+id), WithDescription(id)) assert.NoError(t, err) return g }(), @@ -148,7 +147,7 @@ func TestRepository_CreateGroup(t *testing.T) { assert := assert.New(t) if tt.wantDup { - dup, err := NewGroup(ctx, org.PublicId, tt.args.opt...) + dup, err := NewGroup(org.PublicId, tt.args.opt...) assert.NoError(err) dup, err = repo.CreateGroup(context.Background(), dup, tt.args.opt...) assert.NoError(err) @@ -474,7 +473,6 @@ func TestRepository_UpdateGroup(t *testing.T) { func TestRepository_DeleteGroup(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") a := assert.New(t) rw := db.New(conn) @@ -482,7 +480,7 @@ func TestRepository_DeleteGroup(t *testing.T) { repo := TestRepo(t, conn, wrapper) org, _ := TestScopes(t, repo) - grpId, err := newGroupId(ctx) + grpId, err := newGroupId() a.NoError(err) type args struct { @@ -520,7 +518,7 @@ func TestRepository_DeleteGroup(t *testing.T) { name: "not-found", args: args{ group: func() *Group { - g, err := NewGroup(ctx, org.PublicId) + g, err := NewGroup(org.PublicId) g.PublicId = grpId a.NoError(err) return g @@ -534,7 +532,7 @@ func TestRepository_DeleteGroup(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) - deletedRows, err := repo.DeleteGroup(ctx, tt.args.group.PublicId, tt.args.opt...) + deletedRows, err := repo.DeleteGroup(context.Background(), tt.args.group.PublicId, tt.args.opt...) 
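A minimal sketch (not part of the patch) of the group call shapes after this change, built only from the `+` lines in the hunks above; it assumes code inside the internal iam package, and the wrapper function and its arguments are placeholders.

// createExampleGroup is a hypothetical helper: the in-memory constructors
// (NewGroup, NewGroupMemberUser) no longer take a context, while the
// repository methods still do.
func createExampleGroup(ctx context.Context, repo *Repository, scopeId, userId string) error {
	g, err := NewGroup(scopeId, WithName("example-group"), WithDescription("sketch"))
	if err != nil {
		return err
	}
	g, err = repo.CreateGroup(ctx, g) // repository call keeps its ctx parameter
	if err != nil {
		return err
	}
	// Membership rows follow the same pattern: built in memory without a ctx.
	if _, err := NewGroupMemberUser(g.PublicId, userId); err != nil {
		return err
	}
	return nil
}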
if tt.wantErr { assert.Error(err) assert.Equal(0, deletedRows) @@ -546,7 +544,7 @@ func TestRepository_DeleteGroup(t *testing.T) { } assert.NoError(err) assert.Equal(tt.wantRowsDeleted, deletedRows) - foundGroup, _, err := repo.LookupGroup(ctx, tt.args.group.PublicId) + foundGroup, _, err := repo.LookupGroup(context.Background(), tt.args.group.PublicId) assert.NoError(err) assert.Nil(foundGroup) diff --git a/internal/iam/repository_principal_role.go b/internal/iam/repository_principal_role.go index 8e5de88a819..a982fead74e 100644 --- a/internal/iam/repository_principal_role.go +++ b/internal/iam/repository_principal_role.go @@ -38,7 +38,7 @@ func (r *Repository) AddPrincipalRoles(ctx context.Context, roleId string, roleV newUserRoles := make([]any, 0, len(userIds)) for _, id := range userIds { - usrRole, err := NewUserRole(ctx, roleId, id) + usrRole, err := NewUserRole(roleId, id) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory user role")) } @@ -46,7 +46,7 @@ func (r *Repository) AddPrincipalRoles(ctx context.Context, roleId string, roleV } newGrpRoles := make([]any, 0, len(groupIds)) for _, id := range groupIds { - grpRole, err := NewGroupRole(ctx, roleId, id) + grpRole, err := NewGroupRole(roleId, id) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory group role")) } @@ -54,7 +54,7 @@ func (r *Repository) AddPrincipalRoles(ctx context.Context, roleId string, roleV } newManagedGrpRoles := make([]any, 0, len(managedGroupIds)) for _, id := range managedGroupIds { - managedGrpRole, err := NewManagedGroupRole(ctx, roleId, id) + managedGrpRole, err := NewManagedGroupRole(roleId, id) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory managed group role")) } @@ -340,7 +340,7 @@ func (r *Repository) DeletePrincipalRoles(ctx context.Context, roleId string, ro deleteUserRoles := make([]any, 0, len(userIds)) for _, id := range userIds { - usrRole, err := NewUserRole(ctx, roleId, id) + usrRole, err := NewUserRole(roleId, id) if err != nil { return db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory user role")) } @@ -348,7 +348,7 @@ func (r *Repository) DeletePrincipalRoles(ctx context.Context, roleId string, ro } deleteGrpRoles := make([]any, 0, len(groupIds)) for _, id := range groupIds { - grpRole, err := NewGroupRole(ctx, roleId, id) + grpRole, err := NewGroupRole(roleId, id) if err != nil { return db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory group role")) } @@ -356,7 +356,7 @@ func (r *Repository) DeletePrincipalRoles(ctx context.Context, roleId string, ro } deleteManagedGrpRoles := make([]any, 0, len(managedGroupIds)) for _, id := range managedGroupIds { - managedGrpRole, err := NewManagedGroupRole(ctx, roleId, id) + managedGrpRole, err := NewManagedGroupRole(roleId, id) if err != nil { return db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory managed group role")) } @@ -508,7 +508,7 @@ func (r *Repository) PrincipalsToSet(ctx context.Context, role *Role, userIds, g for _, id := range userIds { userIdsMap[id] = struct{}{} if _, ok := existingUsers[id]; !ok { - usrRole, err := NewUserRole(ctx, role.PublicId, id) + usrRole, err := NewUserRole(role.PublicId, id) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory user role for add")) } @@ -520,7 +520,7 @@ func (r *Repository) PrincipalsToSet(ctx 
context.Context, role *Role, userIds, g for _, id := range groupIds { groupIdsMap[id] = struct{}{} if _, ok := existingGroups[id]; !ok { - grpRole, err := NewGroupRole(ctx, role.PublicId, id) + grpRole, err := NewGroupRole(role.PublicId, id) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory group role for add")) } @@ -532,7 +532,7 @@ func (r *Repository) PrincipalsToSet(ctx context.Context, role *Role, userIds, g for _, id := range managedGroupIds { managedGroupIdsMap[id] = struct{}{} if _, ok := existingManagedGroups[id]; !ok { - managedGrpRole, err := NewManagedGroupRole(ctx, role.PublicId, id) + managedGrpRole, err := NewManagedGroupRole(role.PublicId, id) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory managed group role for add")) } @@ -542,7 +542,7 @@ func (r *Repository) PrincipalsToSet(ctx context.Context, role *Role, userIds, g var deleteUserRoles []any for _, p := range existingUsers { if _, ok := userIdsMap[p.PrincipalId]; !ok { - usrRole, err := NewUserRole(ctx, p.GetRoleId(), p.GetPrincipalId()) + usrRole, err := NewUserRole(p.GetRoleId(), p.GetPrincipalId()) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory user role for delete")) } @@ -552,7 +552,7 @@ func (r *Repository) PrincipalsToSet(ctx context.Context, role *Role, userIds, g var deleteGrpRoles []any for _, p := range existingGroups { if _, ok := groupIdsMap[p.PrincipalId]; !ok { - grpRole, err := NewGroupRole(ctx, p.GetRoleId(), p.GetPrincipalId()) + grpRole, err := NewGroupRole(p.GetRoleId(), p.GetPrincipalId()) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory group role for delete")) } @@ -562,7 +562,7 @@ func (r *Repository) PrincipalsToSet(ctx context.Context, role *Role, userIds, g var deleteManagedGrpRoles []any for _, p := range existingManagedGroups { if _, ok := managedGroupIdsMap[p.PrincipalId]; !ok { - managedGrpRole, err := NewManagedGroupRole(ctx, p.GetRoleId(), p.GetPrincipalId()) + managedGrpRole, err := NewManagedGroupRole(p.GetRoleId(), p.GetPrincipalId()) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory managed group role for delete")) } @@ -610,11 +610,6 @@ func splitPrincipals(ctx context.Context, principals []string) (users, groups, m managedGroups = make([]string, 0, len(principals)) } managedGroups = append(managedGroups, principal) - case strings.HasPrefix(principal, globals.LdapManagedGroupPrefix): - if managedGroups == nil { - managedGroups = make([]string, 0, len(principals)) - } - managedGroups = append(managedGroups, principal) default: return nil, nil, nil, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("invalid principal ID %q", principal)) } diff --git a/internal/iam/repository_role.go b/internal/iam/repository_role.go index 6c9f6b38789..86bbe76e1ec 100644 --- a/internal/iam/repository_role.go +++ b/internal/iam/repository_role.go @@ -29,7 +29,7 @@ func (r *Repository) CreateRole(ctx context.Context, role *Role, _ ...Option) (* if role.ScopeId == "" { return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope id") } - id, err := newRoleId(ctx) + id, err := newRoleId() if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -99,7 +99,7 @@ func (r *Repository) UpdateRole(ctx context.Context, role *Role, version uint32, if err != nil { return errors.Wrap(ctx, err, op) } - repo, err := NewRepository(ctx, read, w, r.kms) + repo, err 
:= NewRepository(read, w, r.kms) if err != nil { return errors.Wrap(ctx, err, op) } @@ -142,7 +142,7 @@ func (r *Repository) LookupRole(ctx context.Context, withPublicId string, _ ...O if err := read.LookupByPublicId(ctx, &role); err != nil { return errors.Wrap(ctx, err, op) } - repo, err := NewRepository(ctx, read, w, r.kms) + repo, err := NewRepository(read, w, r.kms) if err != nil { return errors.Wrap(ctx, err, op) } diff --git a/internal/iam/repository_role_grant.go b/internal/iam/repository_role_grant.go index 6c12ae142d3..711c79d74ed 100644 --- a/internal/iam/repository_role_grant.go +++ b/internal/iam/repository_role_grant.go @@ -34,7 +34,7 @@ func (r *Repository) AddRoleGrants(ctx context.Context, roleId string, roleVersi newRoleGrants := make([]any, 0, len(grants)) for _, grant := range grants { - roleGrant, err := NewRoleGrant(ctx, roleId, grant) + roleGrant, err := NewRoleGrant(roleId, grant) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } @@ -169,7 +169,7 @@ func (r *Repository) DeleteRoleGrants(ctx context.Context, roleId string, roleVe deleteRoleGrants := make([]any, 0, len(grants)) for _, grant := range grants { // Use a fake scope, just want to get out a canonical string - perm, err := perms.Parse(ctx, "o_abcd1234", grant, perms.WithSkipFinalValidation(true)) + perm, err := perms.Parse("o_abcd1234", grant, perms.WithSkipFinalValidation(true)) if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("parsing grant string")) } @@ -178,7 +178,7 @@ func (r *Repository) DeleteRoleGrants(ctx context.Context, roleId string, roleVe continue } - roleGrant, err := NewRoleGrant(ctx, roleId, grant) + roleGrant, err := NewRoleGrant(roleId, grant) if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } @@ -261,7 +261,7 @@ func (r *Repository) SetRoleGrants(ctx context.Context, roleId string, roleVersi deleteRoleGrants := make([]any, 0, len(grants)) for _, grant := range grants { // Use a fake scope, just want to get out a canonical string - perm, err := perms.Parse(ctx, "o_abcd1234", grant, perms.WithSkipFinalValidation(true)) + perm, err := perms.Parse("o_abcd1234", grant, perms.WithSkipFinalValidation(true)) if err != nil { return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("error parsing grant string")) } @@ -277,7 +277,7 @@ func (r *Repository) SetRoleGrants(ctx context.Context, roleId string, roleVersi } // Not found, so add - rg, err = NewRoleGrant(ctx, roleId, grant) + rg, err = NewRoleGrant(roleId, grant) if err != nil { return nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } diff --git a/internal/iam/repository_role_grant_test.go b/internal/iam/repository_role_grant_test.go index 051a4084493..6594c9dd1da 100644 --- a/internal/iam/repository_role_grant_test.go +++ b/internal/iam/repository_role_grant_test.go @@ -223,7 +223,6 @@ func TestRepository_ListRoleGrants(t *testing.T) { func TestRepository_DeleteRoleGrants(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -341,7 +340,7 @@ func TestRepository_DeleteRoleGrants(t *testing.T) { grants := make([]*RoleGrant, 0, tt.args.createCnt) grantStrings := make([]string, 0, tt.args.createCnt) for i := 0; i < tt.args.createCnt; i++ { - g, err := NewRoleGrant(ctx, tt.args.role.PublicId, fmt.Sprintf("actions=*;id=s_%d", i), tt.args.opt...) 
+ g, err := NewRoleGrant(tt.args.role.PublicId, fmt.Sprintf("actions=*;id=s_%d", i), tt.args.opt...) require.NoError(err) grantStrings = append(grantStrings, g.RawGrant) grants = append(grants, g) @@ -406,7 +405,6 @@ func TestRepository_DeleteRoleGrants(t *testing.T) { func TestRepository_SetRoleGrants_Randomize(t *testing.T) { t.Parallel() - ctx := context.Background() require := require.New(t) conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -423,7 +421,7 @@ func TestRepository_SetRoleGrants_Randomize(t *testing.T) { totalCnt := 30 grants := make([]*roleGrantWrapper, 0, totalCnt) for i := 0; i < totalCnt; i++ { - g, err := NewRoleGrant(ctx, role.PublicId, fmt.Sprintf("id=s_%d;actions=*", i)) + g, err := NewRoleGrant(role.PublicId, fmt.Sprintf("id=s_%d;actions=*", i)) require.NoError(err) grants = append(grants, &roleGrantWrapper{ grantString: g.RawGrant, @@ -448,16 +446,16 @@ func TestRepository_SetRoleGrants_Randomize(t *testing.T) { // First time, run a couple of error conditions if i == 1 { - _, _, err := repo.SetRoleGrants(ctx, "", 1, []string{}) + _, _, err := repo.SetRoleGrants(context.Background(), "", 1, []string{}) require.Error(err) - _, _, err = repo.SetRoleGrants(ctx, role.PublicId, 1, nil) + _, _, err = repo.SetRoleGrants(context.Background(), role.PublicId, 1, nil) require.Error(err) } - _, _, err := repo.SetRoleGrants(ctx, role.PublicId, uint32(i), grantsToSet) + _, _, err := repo.SetRoleGrants(context.Background(), role.PublicId, uint32(i), grantsToSet) require.NoError(err) - roleGrants, err := repo.ListRoleGrants(ctx, role.PublicId) + roleGrants, err := repo.ListRoleGrants(context.Background(), role.PublicId) require.NoError(err) require.Equal(len(grantsToSet), len(roleGrants)) for _, rg := range roleGrants { diff --git a/internal/iam/repository_role_test.go b/internal/iam/repository_role_test.go index 4d9b4dadb73..2f8826a3dc7 100644 --- a/internal/iam/repository_role_test.go +++ b/internal/iam/repository_role_test.go @@ -22,7 +22,6 @@ import ( func TestRepository_CreateRole(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -47,7 +46,7 @@ func TestRepository_CreateRole(t *testing.T) { name: "valid-org", args: args{ role: func() *Role { - r, err := NewRole(ctx, org.PublicId, WithName("valid-org"+id), WithDescription(id)) + r, err := NewRole(org.PublicId, WithName("valid-org"+id), WithDescription(id)) assert.NoError(t, err) return r }(), @@ -58,7 +57,7 @@ func TestRepository_CreateRole(t *testing.T) { name: "valid-proj", args: args{ role: func() *Role { - r, err := NewRole(ctx, proj.PublicId, WithName("valid-proj"+id), WithDescription(id)) + r, err := NewRole(proj.PublicId, WithName("valid-proj"+id), WithDescription(id)) assert.NoError(t, err) return r }(), @@ -69,7 +68,7 @@ func TestRepository_CreateRole(t *testing.T) { name: "bad-public-id", args: args{ role: func() *Role { - r, err := NewRole(ctx, proj.PublicId, WithName("valid-proj"+id), WithDescription(id)) + r, err := NewRole(proj.PublicId, WithName("valid-proj"+id), WithDescription(id)) assert.NoError(t, err) r.PublicId = id return r @@ -105,7 +104,7 @@ func TestRepository_CreateRole(t *testing.T) { name: "bad-scope-id", args: args{ role: func() *Role { - r, err := NewRole(ctx, id) + r, err := NewRole(id) assert.NoError(t, err) return r }(), @@ -118,7 +117,7 @@ func TestRepository_CreateRole(t *testing.T) { name: "dup-name", args: args{ role: func() *Role { - r, err := NewRole(ctx, org.PublicId, 
WithName("dup-name"+id), WithDescription(id)) + r, err := NewRole(org.PublicId, WithName("dup-name"+id), WithDescription(id)) assert.NoError(t, err) return r }(), @@ -133,7 +132,7 @@ func TestRepository_CreateRole(t *testing.T) { name: "dup-name-but-diff-scope", args: args{ role: func() *Role { - r, err := NewRole(ctx, proj.PublicId, WithName("dup-name-but-diff-scope"+id), WithDescription(id)) + r, err := NewRole(proj.PublicId, WithName("dup-name-but-diff-scope"+id), WithDescription(id)) assert.NoError(t, err) return r }(), @@ -148,7 +147,7 @@ func TestRepository_CreateRole(t *testing.T) { assert := assert.New(t) if tt.wantDup { - dup, err := NewRole(ctx, org.PublicId, tt.args.opt...) + dup, err := NewRole(org.PublicId, tt.args.opt...) assert.NoError(err) dup, err = repo.CreateRole(context.Background(), dup, tt.args.opt...) assert.NoError(err) @@ -471,14 +470,13 @@ func TestRepository_UpdateRole(t *testing.T) { func TestRepository_DeleteRole(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) org, _ := TestScopes(t, repo) - roleId, err := newRoleId(ctx) + roleId, err := newRoleId() require.NoError(t, err) type args struct { @@ -517,7 +515,7 @@ func TestRepository_DeleteRole(t *testing.T) { name: "not-found", args: args{ role: func() *Role { - r, err := NewRole(ctx, org.PublicId) + r, err := NewRole(org.PublicId) r.PublicId = roleId require.NoError(t, err) return r @@ -531,7 +529,7 @@ func TestRepository_DeleteRole(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) - deletedRows, err := repo.DeleteRole(ctx, tt.args.role.PublicId, tt.args.opt...) + deletedRows, err := repo.DeleteRole(context.Background(), tt.args.role.PublicId, tt.args.opt...) 
if tt.wantErr { assert.Error(err) assert.Equal(0, deletedRows) @@ -543,7 +541,7 @@ func TestRepository_DeleteRole(t *testing.T) { } assert.NoError(err) assert.Equal(tt.wantRowsDeleted, deletedRows) - foundRole, _, _, err := repo.LookupRole(ctx, tt.args.role.PublicId) + foundRole, _, _, err := repo.LookupRole(context.Background(), tt.args.role.PublicId) assert.NoError(err) assert.Nil(foundRole) diff --git a/internal/iam/repository_scope.go b/internal/iam/repository_scope.go index 95ed1bdb558..e06a2d22680 100644 --- a/internal/iam/repository_scope.go +++ b/internal/iam/repository_scope.go @@ -68,7 +68,7 @@ func (r *Repository) CreateScope(ctx context.Context, s *Scope, userId string, o } scopePublicId = opts.withPublicId } else { - scopePublicId, err = newScopeId(ctx, scopeType) + scopePublicId, err = newScopeId(scopeType) if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -103,11 +103,11 @@ func (r *Repository) CreateScope(ctx context.Context, s *Scope, userId string, o _ = adminRole default: - adminRole, err = NewRole(ctx, scopePublicId) + adminRole, err = NewRole(scopePublicId) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("error instantiating new admin role")) } - adminRolePublicId, err = newRoleId(ctx) + adminRolePublicId, err = newRoleId() if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("error generating public id for new admin role")) } @@ -129,11 +129,11 @@ func (r *Repository) CreateScope(ctx context.Context, s *Scope, userId string, o var defaultRole *Role var defaultRoleRaw any if !opts.withSkipDefaultRoleCreation { - defaultRole, err = NewRole(ctx, scopePublicId) + defaultRole, err = NewRole(scopePublicId) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("error instantiating new default role")) } - defaultRolePublicId, err = newRoleId(ctx) + defaultRolePublicId, err = newRoleId() if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("error generating public id for new default role")) } @@ -223,7 +223,7 @@ func (r *Repository) CreateScope(ctx context.Context, s *Scope, userId string, o msgs = append(msgs, &roleOplogMsg) - roleGrant, err := NewRoleGrant(ctx, adminRolePublicId, "id=*;type=*;actions=*") + roleGrant, err := NewRoleGrant(adminRolePublicId, "id=*;type=*;actions=*") if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } @@ -233,7 +233,7 @@ func (r *Repository) CreateScope(ctx context.Context, s *Scope, userId string, o } msgs = append(msgs, roleGrantOplogMsgs...) 
- rolePrincipal, err := NewUserRole(ctx, adminRolePublicId, userId) + rolePrincipal, err := NewUserRole(adminRolePublicId, userId) if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role user")) } @@ -291,38 +291,38 @@ func (r *Repository) CreateScope(ctx context.Context, s *Scope, userId string, o switch s.Type { case scope.Project.String(): - roleGrant, err := NewRoleGrant(ctx, defaultRolePublicId, "id=*;type=session;actions=list,read:self,cancel:self") + roleGrant, err := NewRoleGrant(defaultRolePublicId, "id=*;type=session;actions=list,read:self,cancel:self") if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } grants = append(grants, roleGrant) - roleGrant, err = NewRoleGrant(ctx, defaultRolePublicId, "type=target;actions=list") + roleGrant, err = NewRoleGrant(defaultRolePublicId, "type=target;actions=list") if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } grants = append(grants, roleGrant) default: - roleGrant, err := NewRoleGrant(ctx, defaultRolePublicId, "id=*;type=scope;actions=list,no-op") + roleGrant, err := NewRoleGrant(defaultRolePublicId, "id=*;type=scope;actions=list,no-op") if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } grants = append(grants, roleGrant) - roleGrant, err = NewRoleGrant(ctx, defaultRolePublicId, "id=*;type=auth-method;actions=authenticate,list") + roleGrant, err = NewRoleGrant(defaultRolePublicId, "id=*;type=auth-method;actions=authenticate,list") if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } grants = append(grants, roleGrant) - roleGrant, err = NewRoleGrant(ctx, defaultRolePublicId, "id={{.Account.Id}};actions=read,change-password") + roleGrant, err = NewRoleGrant(defaultRolePublicId, "id={{.Account.Id}};actions=read,change-password") if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } grants = append(grants, roleGrant) - roleGrant, err = NewRoleGrant(ctx, defaultRolePublicId, "id=*;type=auth-token;actions=list,read:self,delete:self") + roleGrant, err = NewRoleGrant(defaultRolePublicId, "id=*;type=auth-token;actions=list,read:self,delete:self") if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role grant")) } @@ -343,7 +343,7 @@ func (r *Repository) CreateScope(ctx context.Context, s *Scope, userId string, o if s.Type == scope.Project.String() { userId = globals.AnyAuthenticatedUserId } - rolePrincipal, err := NewUserRole(ctx, defaultRolePublicId, userId) + rolePrincipal, err := NewUserRole(defaultRolePublicId, userId) if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory role user")) } diff --git a/internal/iam/repository_scope_test.go b/internal/iam/repository_scope_test.go index 255bec3ba05..0c721ee9946 100644 --- a/internal/iam/repository_scope_test.go +++ b/internal/iam/repository_scope_test.go @@ -22,7 +22,6 @@ import ( func Test_Repository_Scope_Create(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -32,15 +31,15 @@ func Test_Repository_Scope_Create(t *testing.T) { t.Run("valid-scope", func(t *testing.T) { assert, require := assert.New(t), require.New(t) id := testId(t) - s, err := NewOrg(ctx, WithName(id)) + s, err := NewOrg(WithName(id)) 
require.NoError(err) - s, err = repo.CreateScope(ctx, s, "") + s, err = repo.CreateScope(context.Background(), s, "") require.NoError(err) require.NotNil(s) assert.NotEmpty(s.GetPublicId()) assert.Equal(s.GetName(), id) - foundScope, err := repo.LookupScope(ctx, s.PublicId) + foundScope, err := repo.LookupScope(context.Background(), s.PublicId) require.NoError(err) assert.True(proto.Equal(foundScope, s)) @@ -49,16 +48,16 @@ func Test_Repository_Scope_Create(t *testing.T) { }) t.Run("valid-scope-withPublicId", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - publicId, err := newScopeId(ctx, scope.Org) + publicId, err := newScopeId(scope.Org) require.NoError(err) - s, err := NewOrg(ctx) + s, err := NewOrg() require.NoError(err) - s, err = repo.CreateScope(ctx, s, "", WithPublicId(publicId)) + s, err = repo.CreateScope(context.Background(), s, "", WithPublicId(publicId)) require.NoError(err) require.NotNil(s) assert.Equal(publicId, s.GetPublicId()) - foundScope, err := repo.LookupScope(ctx, s.PublicId) + foundScope, err := repo.LookupScope(context.Background(), s.PublicId) require.NoError(err) assert.True(proto.Equal(foundScope, s)) @@ -69,7 +68,7 @@ func Test_Repository_Scope_Create(t *testing.T) { assert, require := assert.New(t), require.New(t) id := testId(t) - s, err := NewOrg(ctx, WithName(id)) + s, err := NewOrg(WithName(id)) require.NoError(err) s, err = repo.CreateScope(context.Background(), s, "") @@ -78,7 +77,7 @@ func Test_Repository_Scope_Create(t *testing.T) { assert.NotEmpty(s.GetPublicId()) assert.Equal(s.GetName(), id) - s2, err := NewOrg(ctx, WithName(id)) + s2, err := NewOrg(WithName(id)) require.NoError(err) s2, err = repo.CreateScope(context.Background(), s2, "") require.Error(err) @@ -88,7 +87,7 @@ func Test_Repository_Scope_Create(t *testing.T) { assert, require := assert.New(t), require.New(t) id := testId(t) - s, err := NewOrg(ctx, WithName(id)) + s, err := NewOrg(WithName(id)) require.NoError(err) s, err = repo.CreateScope(context.Background(), s, "") require.NoError(err) @@ -96,13 +95,13 @@ func Test_Repository_Scope_Create(t *testing.T) { assert.NotEmpty(s.GetPublicId()) assert.Equal(s.GetName(), id) - p, err := NewProject(ctx, s.PublicId, WithName(id)) + p, err := NewProject(s.PublicId, WithName(id)) require.NoError(err) p, err = repo.CreateScope(context.Background(), p, "") require.NoError(err) require.NotEmpty(p.PublicId) - p2, err := NewProject(ctx, s.PublicId, WithName(id)) + p2, err := NewProject(s.PublicId, WithName(id)) require.NoError(err) p2, err = repo.CreateScope(context.Background(), p2, "") assert.Error(err) @@ -112,7 +111,7 @@ func Test_Repository_Scope_Create(t *testing.T) { t.Run(fmt.Sprintf("skipping-role-creation-%t", skipCreate), func(t *testing.T) { assert, require := assert.New(t), require.New(t) id := testId(t) - s, err := NewOrg(ctx, WithName(id)) + s, err := NewOrg(WithName(id)) require.NoError(err) s, err = repo.CreateScope(context.Background(), s, user.GetPublicId(), WithSkipAdminRoleCreation(skipCreate), WithSkipDefaultRoleCreation(skipCreate)) require.NoError(err) @@ -137,7 +136,6 @@ func Test_Repository_Scope_Create(t *testing.T) { func Test_Repository_Scope_Update(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -149,7 +147,7 @@ func Test_Repository_Scope_Update(t *testing.T) { s := testOrg(t, repo, id, "") assert.Equal(id, s.Name) - foundScope, err := repo.LookupScope(ctx, s.PublicId) + foundScope, err := 
repo.LookupScope(context.Background(), s.PublicId) require.NoError(err) assert.Empty(foundScope.GetDescription()) // should be "" after update in db assert.True(proto.Equal(foundScope, s)) @@ -159,7 +157,7 @@ func Test_Repository_Scope_Update(t *testing.T) { s.Name = "foo" + id s.Description = "desc-id" // not in the field mask paths - s, updatedRows, err := repo.UpdateScope(ctx, s, 1, []string{"Name"}) + s, updatedRows, err := repo.UpdateScope(context.Background(), s, 1, []string{"Name"}) require.NoError(err) assert.Equal(1, updatedRows) require.NotNil(s) @@ -167,7 +165,7 @@ func Test_Repository_Scope_Update(t *testing.T) { // TODO: This isn't empty because of ICU-490 -- when that is resolved, fix this // assert.Empty(s.GetDescription()) - foundScope, err = repo.LookupScope(ctx, s.PublicId) + foundScope, err = repo.LookupScope(context.Background(), s.PublicId) require.NoError(err) assert.Equal(foundScope.GetPublicId(), s.GetPublicId()) assert.Empty(foundScope.GetDescription()) @@ -177,7 +175,7 @@ func Test_Repository_Scope_Update(t *testing.T) { s.Name = "test2" s.Description = "desc-id-2" - s, updatedRows, err = repo.UpdateScope(ctx, s, 2, []string{"Name", "Description"}) + s, updatedRows, err = repo.UpdateScope(context.Background(), s, 2, []string{"Name", "Description"}) require.NoError(err) assert.Equal(1, updatedRows) require.NotNil(s) @@ -191,14 +189,14 @@ func Test_Repository_Scope_Update(t *testing.T) { s := testOrg(t, repo, id, "") assert.Equal(id, s.Name) - project, err := NewProject(ctx, s.PublicId) + project, err := NewProject(s.PublicId) require.NoError(err) - project, err = repo.CreateScope(ctx, project, "") + project, err = repo.CreateScope(context.Background(), project, "") require.NoError(err) require.NotNil(project) project.ParentId = project.PublicId - project, updatedRows, err := repo.UpdateScope(ctx, project, 1, []string{"ParentId"}) + project, updatedRows, err := repo.UpdateScope(context.Background(), project, 1, []string{"ParentId"}) require.Error(err) assert.Nil(project) assert.Equal(0, updatedRows) diff --git a/internal/iam/repository_test.go b/internal/iam/repository_test.go index d1affb1f322..494cb60eec7 100644 --- a/internal/iam/repository_test.go +++ b/internal/iam/repository_test.go @@ -92,7 +92,7 @@ func TestNewRepository(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewRepository(context.Background(), tt.args.r, tt.args.w, tt.args.kms) + got, err := NewRepository(tt.args.r, tt.args.w, tt.args.kms) if tt.wantErr { require.Error(err) assert.Equal(tt.wantErrString, err.Error()) @@ -106,7 +106,6 @@ func TestNewRepository(t *testing.T) { func Test_Repository_create(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -115,31 +114,31 @@ func Test_Repository_create(t *testing.T) { assert, require := assert.New(t), require.New(t) id := testId(t) - s, err := NewOrg(ctx, WithName("fname-"+id)) + s, err := NewOrg(WithName("fname-" + id)) assert.NoError(err) - s.PublicId, err = newScopeId(ctx, scope.Org) + s.PublicId, err = newScopeId(scope.Org) require.NoError(err) - retScope, err := repo.create(ctx, s) + retScope, err := repo.create(context.Background(), s) require.NoError(err) require.NotNil(retScope) assert.NotEmpty(retScope.GetPublicId()) assert.Equal(retScope.GetName(), "fname-"+id) - foundScope, err := repo.LookupScope(ctx, s.PublicId) + foundScope, err := 
repo.LookupScope(context.Background(), s.PublicId) require.NoError(err) assert.True(proto.Equal(foundScope, retScope.(*Scope))) var metadata store.Metadata - err = rw.LookupWhere(ctx, &metadata, "key = ? and value = ?", []any{"resource-public-id", s.PublicId}) + err = rw.LookupWhere(context.Background(), &metadata, "key = ? and value = ?", []any{"resource-public-id", s.PublicId}) require.NoError(err) var foundEntry oplog.Entry - err = rw.LookupWhere(ctx, &foundEntry, "id = ?", []any{metadata.EntryId}) + err = rw.LookupWhere(context.Background(), &foundEntry, "id = ?", []any{metadata.EntryId}) assert.NoError(err) }) t.Run("nil-resource", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - resource, err := repo.create(ctx, nil) + resource, err := repo.create(context.Background(), nil) require.Error(err) assert.Nil(resource) assert.Equal("iam.(Repository).create: missing resource: parameter violation: error #100", err.Error()) diff --git a/internal/iam/repository_user.go b/internal/iam/repository_user.go index 1dcf817b46e..b10e324e62d 100644 --- a/internal/iam/repository_user.go +++ b/internal/iam/repository_user.go @@ -36,7 +36,7 @@ func (r *Repository) CreateUser(ctx context.Context, user *User, opt ...Option) } u.PublicId = opts.withPublicId } else { - id, err := newUserId(ctx) + id, err := newUserId() if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -282,11 +282,11 @@ func (r *Repository) LookupUserWithLogin(ctx context.Context, accountId string, if err != nil { return errors.Wrap(ctx, err, op) } - obtainedUser, err = NewUser(ctx, acct.ScopeId, opt...) + obtainedUser, err = NewUser(acct.ScopeId, opt...) if err != nil { return errors.Wrap(ctx, err, op) } - id, err := newUserId(ctx) + id, err := newUserId() if err != nil { return errors.Wrap(ctx, err, op) } diff --git a/internal/iam/repository_user_test.go b/internal/iam/repository_user_test.go index 9eb3a60ca23..fd7fd28c80f 100644 --- a/internal/iam/repository_user_test.go +++ b/internal/iam/repository_user_test.go @@ -31,7 +31,6 @@ import ( func TestRepository_CreateUser(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -54,7 +53,7 @@ func TestRepository_CreateUser(t *testing.T) { name: "valid", args: args{ user: func() *iam.User { - u, err := iam.NewUser(ctx, org.PublicId, iam.WithName("valid"+id), iam.WithDescription(id)) + u, err := iam.NewUser(org.PublicId, iam.WithName("valid"+id), iam.WithDescription(id)) assert.NoError(t, err) return u }(), @@ -65,7 +64,7 @@ func TestRepository_CreateUser(t *testing.T) { name: "bad-scope-id", args: args{ user: func() *iam.User { - u, err := iam.NewUser(ctx, id) + u, err := iam.NewUser(id) assert.NoError(t, err) return u }(), @@ -77,7 +76,7 @@ func TestRepository_CreateUser(t *testing.T) { name: "dup-name", args: args{ user: func() *iam.User { - u, err := iam.NewUser(ctx, org.PublicId, iam.WithName("dup-name"+id)) + u, err := iam.NewUser(org.PublicId, iam.WithName("dup-name"+id)) assert.NoError(t, err) return u }(), @@ -91,11 +90,11 @@ func TestRepository_CreateUser(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) if tt.wantDup { - dup, err := repo.CreateUser(ctx, tt.args.user, tt.args.opt...) + dup, err := repo.CreateUser(context.Background(), tt.args.user, tt.args.opt...) require.NoError(err) require.NotNil(dup) } - u, err := repo.CreateUser(ctx, tt.args.user, tt.args.opt...) 
+ u, err := repo.CreateUser(context.Background(), tt.args.user, tt.args.opt...) if tt.wantErr { require.Error(err) assert.Nil(u) @@ -111,7 +110,7 @@ func TestRepository_CreateUser(t *testing.T) { assert.NotNil(u.CreateTime) assert.NotNil(u.UpdateTime) - foundUser, _, err := repo.LookupUser(ctx, u.PublicId) + foundUser, _, err := repo.LookupUser(context.Background(), u.PublicId) require.NoError(err) assert.True(proto.Equal(foundUser, u)) @@ -132,7 +131,7 @@ func TestRepository_LookupUser_WithDifferentPrimaryAuthMethods(t *testing.T) { kmsCache := kms.TestKms(t, conn, wrapper) repo := iam.TestRepo(t, conn, wrapper) org, _ := iam.TestScopes(t, repo) - databaseWrapper, err := kmsCache.GetWrapper(ctx, org.PublicId, kms.KeyPurposeDatabase) + databaseWrapper, err := kmsCache.GetWrapper(context.Background(), org.PublicId, kms.KeyPurposeDatabase) require.NoError(t, err) var accountIds []string @@ -149,7 +148,7 @@ func TestRepository_LookupUser_WithDifferentPrimaryAuthMethods(t *testing.T) { accountIds = append(accountIds, pwAcct.PublicId) u := iam.TestUser(t, repo, org.PublicId) - newAccts, err := repo.AddUserAccounts(ctx, u.PublicId, u.Version, accountIds) + newAccts, err := repo.AddUserAccounts(context.Background(), u.PublicId, u.Version, accountIds) require.NoError(t, err) sort.Strings(newAccts) require.Equal(t, accountIds, newAccts) @@ -499,7 +498,6 @@ func TestRepository_UpdateUser(t *testing.T) { func TestRepository_DeleteUser(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -541,9 +539,9 @@ func TestRepository_DeleteUser(t *testing.T) { name: "not-found", args: args{ user: func() *iam.User { - u, err := iam.NewUser(ctx, org.PublicId) + u, err := iam.NewUser(org.PublicId) require.NoError(t, err) - id, err := db.NewPublicId(ctx, globals.UserPrefix) + id, err := db.NewPublicId(globals.UserPrefix) require.NoError(t, err) u.PublicId = id return u @@ -557,7 +555,7 @@ func TestRepository_DeleteUser(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - deletedRows, err := repo.DeleteUser(ctx, tt.args.user.PublicId, tt.args.opt...) + deletedRows, err := repo.DeleteUser(context.Background(), tt.args.user.PublicId, tt.args.opt...) if tt.wantErr { require.Error(err) assert.Equal(0, deletedRows) @@ -569,7 +567,7 @@ func TestRepository_DeleteUser(t *testing.T) { } require.NoError(err) assert.Equal(tt.wantRowsDeleted, deletedRows) - foundUser, _, err := repo.LookupUser(ctx, tt.args.user.PublicId) + foundUser, _, err := repo.LookupUser(context.Background(), tt.args.user.PublicId) require.NoError(err) assert.Nil(foundUser) diff --git a/internal/iam/role.go b/internal/iam/role.go index f33548c6684..bbd80fb2795 100644 --- a/internal/iam/role.go +++ b/internal/iam/role.go @@ -34,10 +34,10 @@ var ( // NewRole creates a new in memory role with a scope (project/org) // allowed options include: withDescripion, WithName, withGrantScopeId. -func NewRole(ctx context.Context, scopeId string, opt ...Option) (*Role, error) { +func NewRole(scopeId string, opt ...Option) (*Role, error) { const op = "iam.NewRole" if scopeId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing scope id") } opts := getOpts(opt...) 
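The error-handling pattern behind these signature changes: once a constructor has no context parameter, it switches from errors.New(ctx, ...) / errors.Wrap(ctx, ...) to the context-free deprecated variants. A sketch with a placeholder function and op name, assuming iam-package-internal code:

// newExampleResource is a hypothetical constructor body showing the shape used
// on the `+` side of these hunks.
func newExampleResource(scopeId string) (string, error) {
	const op = "iam.newExampleResource" // placeholder op name
	if scopeId == "" {
		return "", errors.NewDeprecated(errors.InvalidParameter, op, "missing scope id")
	}
	id, err := newRoleId() // context-free id generation, as in the hunks above
	if err != nil {
		return "", errors.WrapDeprecated(err, op, errors.WithMsg("generating public id"))
	}
	return id, nil
}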
r := &Role{ diff --git a/internal/iam/role_grant.go b/internal/iam/role_grant.go index 05e6592daec..4047a2ce21c 100644 --- a/internal/iam/role_grant.go +++ b/internal/iam/role_grant.go @@ -28,21 +28,21 @@ var ( ) // NewRoleGrant creates a new in memory role grant -func NewRoleGrant(ctx context.Context, roleId string, grant string, _ ...Option) (*RoleGrant, error) { +func NewRoleGrant(roleId string, grant string, _ ...Option) (*RoleGrant, error) { const op = "iam.NewRoleGrant" if roleId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing role id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing role id") } if grant == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing grant") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing grant") } // Validate that the grant parses successfully. Note that we fake the scope // here to avoid a lookup as the scope is only relevant at actual ACL // checking time and we just care that it parses correctly. - perm, err := perms.Parse(ctx, "o_abcd1234", grant) + perm, err := perms.Parse("o_abcd1234", grant) if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithMsg("parsing grant string")) + return nil, errors.WrapDeprecated(err, op, errors.WithMsg("parsing grant string")) } rg := &RoleGrant{ RoleGrant: &store.RoleGrant{ @@ -80,7 +80,7 @@ func (g *RoleGrant) VetForWrite(ctx context.Context, _ db.Reader, _ db.OpType, _ // checking time and we just care that it parses correctly. We may have // already done this in NewRoleGrant, but we re-check and set it here // anyways because it should still be part of the vetting process. - perm, err := perms.Parse(ctx, "o_abcd1234", g.RawGrant) + perm, err := perms.Parse("o_abcd1234", g.RawGrant) if err != nil { return errors.Wrap(ctx, err, op, errors.WithMsg("parsing grant string")) } diff --git a/internal/iam/role_grant_test.go b/internal/iam/role_grant_test.go index f8d13b9a525..ca63f81cc1e 100644 --- a/internal/iam/role_grant_test.go +++ b/internal/iam/role_grant_test.go @@ -17,7 +17,6 @@ import ( func TestRoleGrant_Create(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -91,7 +90,7 @@ func TestRoleGrant_Create(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) db.TestDeleteWhere(t, conn, func() any { a := allocRoleGrant(); return &a }(), "1=1") - got, err := NewRoleGrant(ctx, tt.args.roleId, tt.args.grant, tt.args.opt...) + got, err := NewRoleGrant(tt.args.roleId, tt.args.grant, tt.args.opt...) 
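A sketch of grant construction after the change (placeholder helper, iam-package-internal): NewRoleGrant validates the grant string by parsing it against a fake scope, since the real scope only matters at ACL-check time.

// buildExampleGrant is a hypothetical helper mirroring the post-change flow in role_grant.go.
func buildExampleGrant(roleId string) (*RoleGrant, error) {
	rg, err := NewRoleGrant(roleId, "type=target;actions=list") // no ctx
	if err != nil {
		return nil, err
	}
	// The repositories canonicalize incoming grant strings the same way:
	//   perms.Parse("o_abcd1234", grant, perms.WithSkipFinalValidation(true))
	return rg, nil
}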
if tt.wantErr { require.Error(err) assert.True(errors.Match(errors.T(tt.wantIsErr), err)) @@ -139,7 +138,6 @@ func TestRoleGrant_Update(t *testing.T) { func TestRoleGrant_Delete(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -198,10 +196,10 @@ func TestRoleGrant_Delete(t *testing.T) { assert, require := assert.New(t), require.New(t) r := allocRoleGrant() db.TestDeleteWhere(t, conn, &r, "1=1") - rg, err := NewRoleGrant(ctx, projRole.PublicId, "id=u_bcde;actions=read,update") + rg, err := NewRoleGrant(projRole.PublicId, "id=u_bcde;actions=read,update") require.NoError(err) require.NoError(rw.Create(context.Background(), rg)) - rg, err = NewRoleGrant(ctx, projRole.PublicId, "id=u_cdef;actions=read,update") + rg, err = NewRoleGrant(projRole.PublicId, "id=u_cdef;actions=read,update") require.NoError(err) require.NoError(rw.Create(context.Background(), rg)) @@ -226,7 +224,6 @@ func TestRoleGrant_Delete(t *testing.T) { func TestRoleGrant_Clone(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -235,7 +232,7 @@ func TestRoleGrant_Clone(t *testing.T) { s := testOrg(t, repo, "", "") role := TestRole(t, conn, s.PublicId) - g, err := NewRoleGrant(ctx, role.PublicId, "id=*;type=*;actions=*") + g, err := NewRoleGrant(role.PublicId, "id=*;type=*;actions=*") assert.NoError(err) assert.NotNil(g) assert.Equal(g.RoleId, role.PublicId) @@ -249,13 +246,13 @@ func TestRoleGrant_Clone(t *testing.T) { s := testOrg(t, repo, "", "") role := TestRole(t, conn, s.PublicId) - g, err := NewRoleGrant(ctx, role.PublicId, "id=*;type=*;actions=*") + g, err := NewRoleGrant(role.PublicId, "id=*;type=*;actions=*") assert.NoError(err) require.NotNil(g) assert.Equal(g.RoleId, role.PublicId) assert.Equal(g.RawGrant, "id=*;type=*;actions=*") - g2, err := NewRoleGrant(ctx, role.PublicId, "id=u_foo;actions=read") + g2, err := NewRoleGrant(role.PublicId, "id=u_foo;actions=read") assert.NoError(err) require.NotNil(g2) assert.Equal(g2.RoleId, role.PublicId) diff --git a/internal/iam/role_test.go b/internal/iam/role_test.go index 8566ebf4619..7a0282c7e5d 100644 --- a/internal/iam/role_test.go +++ b/internal/iam/role_test.go @@ -24,7 +24,6 @@ import ( func TestNewRole(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -84,7 +83,7 @@ func TestNewRole(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewRole(ctx, tt.args.scopePublicId, tt.args.opt...) + got, err := NewRole(tt.args.scopePublicId, tt.args.opt...) 
if tt.wantErr { require.Error(err) assert.Contains(err.Error(), tt.wantErrMsg) @@ -101,7 +100,6 @@ func TestNewRole(t *testing.T) { func Test_RoleCreate(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -122,9 +120,9 @@ func Test_RoleCreate(t *testing.T) { args: args{ role: func() *Role { id := testId(t) - role, err := NewRole(ctx, org.PublicId, WithName(id), WithDescription("description-"+id)) + role, err := NewRole(org.PublicId, WithName(id), WithDescription("description-"+id)) require.NoError(t, err) - grpId, err := newRoleId(ctx) + grpId, err := newRoleId() require.NoError(t, err) role.PublicId = grpId return role @@ -137,9 +135,9 @@ func Test_RoleCreate(t *testing.T) { args: args{ role: func() *Role { id := testId(t) - role, err := NewRole(ctx, proj.PublicId, WithName(id), WithDescription("description-"+id)) + role, err := NewRole(proj.PublicId, WithName(id), WithDescription("description-"+id)) require.NoError(t, err) - grpId, err := newRoleId(ctx) + grpId, err := newRoleId() require.NoError(t, err) role.PublicId = grpId return role @@ -151,9 +149,9 @@ func Test_RoleCreate(t *testing.T) { name: "valid-with-dup-null-names-and-descriptions", args: args{ role: func() *Role { - role, err := NewRole(ctx, org.PublicId) + role, err := NewRole(org.PublicId) require.NoError(t, err) - roleId, err := newRoleId(ctx) + roleId, err := newRoleId() require.NoError(t, err) role.PublicId = roleId return role @@ -167,9 +165,9 @@ func Test_RoleCreate(t *testing.T) { args: args{ role: func() *Role { id := testId(t) - role, err := NewRole(ctx, id) + role, err := NewRole(id) require.NoError(t, err) - roleId, err := newRoleId(ctx) + roleId, err := newRoleId() require.NoError(t, err) role.PublicId = roleId return role @@ -186,14 +184,14 @@ func Test_RoleCreate(t *testing.T) { w := db.New(conn) if tt.wantDup { r := tt.args.role.Clone().(*Role) - roleId, err := newRoleId(ctx) + roleId, err := newRoleId() require.NoError(err) r.PublicId = roleId - err = w.Create(ctx, r) + err = w.Create(context.Background(), r) require.NoError(err) } r := tt.args.role.Clone().(*Role) - err := w.Create(ctx, r) + err := w.Create(context.Background(), r) if tt.wantErr { require.Error(err) assert.Contains(err.Error(), tt.wantErrMsg) @@ -204,7 +202,7 @@ func Test_RoleCreate(t *testing.T) { foundGrp := allocRole() foundGrp.PublicId = tt.args.role.PublicId - err = w.LookupByPublicId(ctx, &foundGrp) + err = w.LookupByPublicId(context.Background(), &foundGrp) require.NoError(err) assert.Empty(cmp.Diff(r, &foundGrp, protocmp.Transform())) }) diff --git a/internal/iam/scope.go b/internal/iam/scope.go index 08c2d72e75b..216b7388da4 100644 --- a/internal/iam/scope.go +++ b/internal/iam/scope.go @@ -38,19 +38,19 @@ var ( _ Cloneable = (*Scope)(nil) ) -func NewOrg(ctx context.Context, opt ...Option) (*Scope, error) { +func NewOrg(opt ...Option) (*Scope, error) { global := AllocScope() global.PublicId = scope.Global.String() - return newScope(ctx, &global, opt...) + return newScope(&global, opt...) } -func NewProject(ctx context.Context, orgPublicId string, opt ...Option) (*Scope, error) { +func NewProject(orgPublicId string, opt ...Option) (*Scope, error) { const op = "iam.NewProject" org := AllocScope() org.PublicId = orgPublicId - p, err := newScope(ctx, &org, opt...) + p, err := newScope(&org, opt...) 
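Scope construction in scope.go and repository_scope.go follows the same pattern; a sketch (placeholder helper, assumed iam-package-internal) of the post-change calls:

// createExampleScopes is a hypothetical helper built from the `+` signatures above.
func createExampleScopes(ctx context.Context, repo *Repository, userId string) (*Scope, *Scope, error) {
	org, err := NewOrg(WithName("example-org")) // no ctx
	if err != nil {
		return nil, nil, err
	}
	org, err = repo.CreateScope(ctx, org, userId) // userId may be "" as in the tests above
	if err != nil {
		return nil, nil, err
	}
	prj, err := NewProject(org.GetPublicId(), WithDescription("example project"))
	if err != nil {
		return nil, nil, err
	}
	prj, err = repo.CreateScope(ctx, prj, userId)
	if err != nil {
		return nil, nil, err
	}
	return org, prj, nil
}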
if err != nil { - return nil, errors.Wrap(ctx, err, op) + return nil, errors.WrapDeprecated(err, op) } return p, nil } @@ -60,10 +60,10 @@ func NewProject(ctx context.Context, orgPublicId string, opt ...Option) (*Scope, // specifies the Scope's parent and must be filled in. The type of the parent is // used to determine the type of the child. WithPrimaryAuthMethodId specifies // the primary auth method for the scope -func newScope(ctx context.Context, parent *Scope, opt ...Option) (*Scope, error) { +func newScope(parent *Scope, opt ...Option) (*Scope, error) { const op = "iam.newScope" if parent == nil || parent.PublicId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "child scope is missing its parent") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "child scope is missing its parent") } var typ scope.Type switch { @@ -73,7 +73,7 @@ func newScope(ctx context.Context, parent *Scope, opt ...Option) (*Scope, error) typ = scope.Project } if typ == scope.Unknown { - return nil, errors.New(ctx, errors.InvalidParameter, op, "unknown scope type") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "unknown scope type") } opts := getOpts(opt...) diff --git a/internal/iam/scope_test.go b/internal/iam/scope_test.go index e4c96842f01..96898e535bf 100644 --- a/internal/iam/scope_test.go +++ b/internal/iam/scope_test.go @@ -20,22 +20,21 @@ import ( func TestScope_New(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") t.Run("valid-org-with-project", func(t *testing.T) { assert, require := assert.New(t), require.New(t) w := db.New(conn) - s, err := NewOrg(ctx) + s, err := NewOrg() require.NoError(err) require.NotNil(s.Scope) - s.PublicId, err = newScopeId(ctx, scope.Org) + s.PublicId, err = newScopeId(scope.Org) require.NoError(err) - err = w.Create(ctx, s) + err = w.Create(context.Background(), s) require.NoError(err) require.NotEmpty(s.PublicId) id := testId(t) - projScope, err := NewProject(ctx, s.PublicId, WithDescription(id)) + projScope, err := NewProject(s.PublicId, WithDescription(id)) require.NoError(err) require.NotNil(projScope.Scope) assert.Equal(projScope.GetParentId(), s.PublicId) @@ -43,14 +42,14 @@ func TestScope_New(t *testing.T) { }) t.Run("unknown-scope", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := newScope(ctx, nil) + s, err := newScope(nil) require.Error(err) require.Nil(s) assert.Contains(err.Error(), "iam.newScope: child scope is missing its parent: parameter violation: error #100") }) t.Run("proj-scope-with-no-org", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - s, err := NewProject(ctx, "") + s, err := NewProject("") require.Error(err) require.Nil(s) assert.Contains(err.Error(), "iam.NewProject: iam.newScope: child scope is missing its parent: parameter violation: error #100") @@ -59,42 +58,41 @@ func TestScope_New(t *testing.T) { func TestScope_Create(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") t.Run("valid", func(t *testing.T) { assert, require := assert.New(t), require.New(t) w := db.New(conn) - s, err := NewOrg(ctx) + s, err := NewOrg() require.NoError(err) require.NotNil(s.Scope) - s.PublicId, err = newScopeId(ctx, scope.Org) + s.PublicId, err = newScopeId(scope.Org) require.NoError(err) - err = w.Create(ctx, s) + err = w.Create(context.Background(), s) require.NoError(err) assert.NotEmpty(s.PublicId) }) t.Run("valid-with-parent", func(t *testing.T) { assert, require 
:= assert.New(t), require.New(t) w := db.New(conn) - s, err := NewOrg(ctx) + s, err := NewOrg() require.NoError(err) require.NotNil(s.Scope) - s.PublicId, err = newScopeId(ctx, scope.Org) + s.PublicId, err = newScopeId(scope.Org) require.NoError(err) - err = w.Create(ctx, s) + err = w.Create(context.Background(), s) require.NoError(err) require.NotEmpty(s.PublicId) id := testId(t) - project, err := NewProject(ctx, s.PublicId, WithDescription(id)) + project, err := NewProject(s.PublicId, WithDescription(id)) require.NoError(err) require.NotNil(project.Scope) assert.Equal(project.Scope.ParentId, s.PublicId) assert.Equal(project.GetDescription(), id) - project.PublicId, err = newScopeId(ctx, scope.Org) + project.PublicId, err = newScopeId(scope.Org) require.NoError(err) - err = w.Create(ctx, project) + err = w.Create(context.Background(), project) require.NoError(err) assert.Equal(project.ParentId, s.PublicId) }) @@ -173,7 +171,7 @@ func TestScope_Actions(t *testing.T) { } func TestScope_ResourceType(t *testing.T) { - o, err := NewOrg(context.Background()) + o, err := NewOrg() require.NoError(t, err) assert.Equal(t, o.ResourceType(), resource.Scope) assert.Equal(t, o.GetParentId(), scope.Global.String()) @@ -204,16 +202,15 @@ func TestScope_Clone(t *testing.T) { // DB itself func TestScope_GlobalErrors(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") w := db.New(conn) t.Run("newScope errors", func(t *testing.T) { // Not allowed - _, err := newScope(ctx, &Scope{Scope: &store.Scope{PublicId: "blahblah"}}) + _, err := newScope(&Scope{Scope: &store.Scope{PublicId: "blahblah"}}) require.Error(t, err) // Should fail as there's no scope - _, err = newScope(ctx, nil) + _, err = newScope(nil) require.Error(t, err) assert.True(t, strings.Contains(err.Error(), "missing its parent")) }) @@ -222,7 +219,7 @@ func TestScope_GlobalErrors(t *testing.T) { s := AllocScope() s.Type = scope.Global.String() s.PublicId = "global" - err := s.VetForWrite(ctx, nil, db.CreateOp) + err := s.VetForWrite(context.Background(), nil, db.CreateOp) require.Error(t, err) assert.True(t, strings.Contains(err.Error(), "iam.(Scope).VetForWrite: you cannot create a global scope: parameter violation: error #100")) }) @@ -232,10 +229,10 @@ func TestScope_GlobalErrors(t *testing.T) { s.Type = scope.Org.String() s.PublicId = "o_1234" s.ParentId = "global" - err := s.VetForWrite(ctx, nil, db.CreateOp) + err := s.VetForWrite(context.Background(), nil, db.CreateOp) require.NoError(t, err) s.ParentId = "o_2345" - err = s.VetForWrite(ctx, nil, db.CreateOp) + err = s.VetForWrite(context.Background(), nil, db.CreateOp) require.Error(t, err) }) t.Run("not deletable in db", func(t *testing.T) { @@ -243,10 +240,10 @@ func TestScope_GlobalErrors(t *testing.T) { s := AllocScope() s.PublicId = "global" // Add this to validate that we did in fact delete - err := w.LookupById(ctx, &s) + err := w.LookupById(context.Background(), &s) require.NoError(t, err) require.Equal(t, s.Type, scope.Global.String()) - rows, err := w.Delete(ctx, &s) + rows, err := w.Delete(context.Background(), &s) require.Error(t, err) assert.Equal(t, 0, rows) }) diff --git a/internal/iam/testing.go b/internal/iam/testing.go index 7a5545f57bc..5951f4d66b7 100644 --- a/internal/iam/testing.go +++ b/internal/iam/testing.go @@ -22,15 +22,14 @@ import ( // ensures that the global scope contains a valid root key. 
func TestRepo(t testing.TB, conn *db.DB, rootWrapper wrapping.Wrapper, opt ...Option) *Repository { t.Helper() - ctx := context.Background() require := require.New(t) rw := db.New(conn) kmsCache := kms.TestKms(t, conn, rootWrapper) - wrapper, err := kmsCache.GetWrapper(ctx, scope.Global.String(), kms.KeyPurposeOplog) + wrapper, err := kmsCache.GetWrapper(context.Background(), scope.Global.String(), kms.KeyPurposeOplog) if err != nil { - err = kmsCache.CreateKeys(ctx, scope.Global.String(), kms.WithRandomReader(rand.Reader)) + err = kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader)) require.NoError(err) - wrapper, err = kmsCache.GetWrapper(ctx, scope.Global.String(), kms.KeyPurposeOplog) + wrapper, err = kmsCache.GetWrapper(context.Background(), scope.Global.String(), kms.KeyPurposeOplog) if err != nil { panic(err) } @@ -38,7 +37,7 @@ func TestRepo(t testing.TB, conn *db.DB, rootWrapper wrapping.Wrapper, opt ...Op require.NoError(err) require.NotNil(wrapper) - repo, err := NewRepository(ctx, rw, rw, kmsCache, opt...) + repo, err := NewRepository(rw, rw, kmsCache, opt...) require.NoError(err) return repo } @@ -61,21 +60,20 @@ func TestSetPrimaryAuthMethod(t testing.TB, repo *Repository, s *Scope, authMeth // TestScopes creates an org and project suitable for testing. func TestScopes(t testing.TB, repo *Repository, opt ...Option) (org *Scope, prj *Scope) { t.Helper() - ctx := context.Background() require := require.New(t) opts := getOpts(opt...) - org, err := NewOrg(ctx, opt...) + org, err := NewOrg(opt...) require.NoError(err) - org, err = repo.CreateScope(ctx, org, opts.withUserId, opt...) + org, err = repo.CreateScope(context.Background(), org, opts.withUserId, opt...) require.NoError(err) require.NotNil(org) require.NotEmpty(org.GetPublicId()) - prj, err = NewProject(ctx, org.GetPublicId(), opt...) + prj, err = NewProject(org.GetPublicId(), opt...) require.NoError(err) - prj, err = repo.CreateScope(ctx, prj, opts.withUserId, opt...) + prj, err = repo.CreateScope(context.Background(), prj, opts.withUserId, opt...) require.NoError(err) require.NotNil(prj) require.NotEmpty(prj.GetPublicId()) @@ -85,14 +83,13 @@ func TestScopes(t testing.TB, repo *Repository, opt ...Option) (org *Scope, prj func TestOrg(t testing.TB, repo *Repository, opt ...Option) *Scope { t.Helper() - ctx := context.Background() require := require.New(t) opts := getOpts(opt...) - org, err := NewOrg(ctx, opt...) + org, err := NewOrg(opt...) require.NoError(err) - org, err = repo.CreateScope(ctx, org, opts.withUserId, opt...) + org, err = repo.CreateScope(context.Background(), org, opts.withUserId, opt...) require.NoError(err) require.NotNil(org) require.NotEmpty(org.GetPublicId()) @@ -102,14 +99,13 @@ func TestOrg(t testing.TB, repo *Repository, opt ...Option) *Scope { func TestProject(t testing.TB, repo *Repository, orgId string, opt ...Option) *Scope { t.Helper() - ctx := context.Background() require := require.New(t) opts := getOpts(opt...) - proj, err := NewProject(ctx, orgId, opt...) + proj, err := NewProject(orgId, opt...) require.NoError(err) - proj, err = repo.CreateScope(ctx, proj, opts.withUserId, opt...) + proj, err = repo.CreateScope(context.Background(), proj, opts.withUserId, opt...) 
require.NoError(err) require.NotNil(proj) require.NotEmpty(proj.GetPublicId()) @@ -119,12 +115,11 @@ func TestProject(t testing.TB, repo *Repository, orgId string, opt ...Option) *S func testOrg(t testing.TB, repo *Repository, name, description string) (org *Scope) { t.Helper() - ctx := context.Background() require := require.New(t) - o, err := NewOrg(ctx, WithDescription(description), WithName(name)) + o, err := NewOrg(WithDescription(description), WithName(name)) require.NoError(err) - o, err = repo.CreateScope(ctx, o, "") + o, err = repo.CreateScope(context.Background(), o, "") require.NoError(err) require.NotNil(o) require.NotEmpty(o.GetPublicId()) @@ -134,12 +129,11 @@ func testOrg(t testing.TB, repo *Repository, name, description string) (org *Sco func testProject(t testing.TB, repo *Repository, orgId string, opt ...Option) *Scope { t.Helper() - ctx := context.Background() require := require.New(t) - p, err := NewProject(ctx, orgId, opt...) + p, err := NewProject(orgId, opt...) require.NoError(err) - p, err = repo.CreateScope(ctx, p, "") + p, err = repo.CreateScope(context.Background(), p, "") require.NoError(err) require.NotNil(p) require.NotEmpty(p.GetPublicId()) @@ -156,7 +150,7 @@ func testId(t testing.TB) string { func testPublicId(t testing.TB, prefix string) string { t.Helper() - publicId, err := db.NewPublicId(context.Background(), prefix) + publicId, err := db.NewPublicId(prefix) require.NoError(t, err) return publicId } @@ -165,17 +159,16 @@ func testPublicId(t testing.TB, prefix string) string { // WithName, WithDescription and WithAccountIds. func TestUser(t testing.TB, repo *Repository, scopeId string, opt ...Option) *User { t.Helper() - ctx := context.Background() require := require.New(t) - user, err := NewUser(ctx, scopeId, opt...) + user, err := NewUser(scopeId, opt...) require.NoError(err) - user, err = repo.CreateUser(ctx, user) + user, err = repo.CreateUser(context.Background(), user) require.NoError(err) require.NotEmpty(user.PublicId) opts := getOpts(opt...) if len(opts.withAccountIds) > 0 { - _, err := repo.AddUserAccounts(ctx, user.PublicId, user.Version, opts.withAccountIds) + _, err := repo.AddUserAccounts(context.Background(), user.PublicId, user.Version, opts.withAccountIds) require.NoError(err) } return user @@ -184,16 +177,15 @@ func TestUser(t testing.TB, repo *Repository, scopeId string, opt ...Option) *Us // TestRole creates a role suitable for testing. func TestRole(t testing.TB, conn *db.DB, scopeId string, opt ...Option) *Role { t.Helper() - ctx := context.Background() require := require.New(t) rw := db.New(conn) - role, err := NewRole(ctx, scopeId, opt...) + role, err := NewRole(scopeId, opt...) require.NoError(err) - id, err := newRoleId(ctx) + id, err := newRoleId() require.NoError(err) role.PublicId = id - err = rw.Create(ctx, role) + err = rw.Create(context.Background(), role) require.NoError(err) require.NotEmpty(role.PublicId) @@ -208,7 +200,7 @@ func TestRoleGrant(t testing.TB, conn *db.DB, roleId, grant string, opt ...Optio require := require.New(t) rw := db.New(conn) - g, err := NewRoleGrant(context.Background(), roleId, grant, opt...) + g, err := NewRoleGrant(roleId, grant, opt...) require.NoError(err) err = rw.Create(context.Background(), g) require.NoError(err) @@ -218,16 +210,15 @@ func TestRoleGrant(t testing.TB, conn *db.DB, roleId, grant string, opt ...Optio // TestGroup creates a group suitable for testing. 
func TestGroup(t testing.TB, conn *db.DB, scopeId string, opt ...Option) *Group { t.Helper() - ctx := context.Background() require := require.New(t) rw := db.New(conn) - grp, err := NewGroup(ctx, scopeId, opt...) + grp, err := NewGroup(scopeId, opt...) require.NoError(err) - id, err := newGroupId(ctx) + id, err := newGroupId() require.NoError(err) grp.PublicId = id - err = rw.Create(ctx, grp) + err = rw.Create(context.Background(), grp) require.NoError(err) require.NotEmpty(grp.PublicId) return grp @@ -235,13 +226,12 @@ func TestGroup(t testing.TB, conn *db.DB, scopeId string, opt ...Option) *Group func TestGroupMember(t testing.TB, conn *db.DB, groupId, userId string, opt ...Option) *GroupMemberUser { t.Helper() - ctx := context.Background() require := require.New(t) rw := db.New(conn) - gm, err := NewGroupMemberUser(ctx, groupId, userId) + gm, err := NewGroupMemberUser(groupId, userId) require.NoError(err) require.NotNil(gm) - err = rw.Create(ctx, gm) + err = rw.Create(context.Background(), gm) require.NoError(err) require.NotEmpty(gm.CreateTime) return gm @@ -249,13 +239,12 @@ func TestGroupMember(t testing.TB, conn *db.DB, groupId, userId string, opt ...O func TestUserRole(t testing.TB, conn *db.DB, roleId, userId string, opt ...Option) *UserRole { t.Helper() - ctx := context.Background() require := require.New(t) rw := db.New(conn) - r, err := NewUserRole(ctx, roleId, userId, opt...) + r, err := NewUserRole(roleId, userId, opt...) require.NoError(err) - err = rw.Create(ctx, r) + err = rw.Create(context.Background(), r) require.NoError(err) return r } @@ -264,7 +253,7 @@ func TestGroupRole(t testing.TB, conn *db.DB, roleId, grpId string, opt ...Optio t.Helper() require := require.New(t) rw := db.New(conn) - r, err := NewGroupRole(context.Background(), roleId, grpId, opt...) + r, err := NewGroupRole(roleId, grpId, opt...) require.NoError(err) err = rw.Create(context.Background(), r) @@ -276,7 +265,7 @@ func TestManagedGroupRole(t testing.TB, conn *db.DB, roleId, managedGrpId string t.Helper() require := require.New(t) rw := db.New(conn) - r, err := NewManagedGroupRole(context.Background(), roleId, managedGrpId, opt...) + r, err := NewManagedGroupRole(roleId, managedGrpId, opt...) 
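Likewise, a hedged sketch (not part of the patch) of how the user, role, group, and membership helpers above chain together; the grant string is borrowed from the ACL tests later in this diff, and the wiring shown is just one plausible arrangement:

```go
package iam

import (
	"testing"

	"github.com/hashicorp/boundary/internal/db"
	"github.com/stretchr/testify/require"
)

// Illustrative sketch only; not part of this patch.
func testPrincipalHelpersSketch(t *testing.T) {
	conn, _ := db.TestSetup(t, "postgres")
	wrapper := db.TestWrapper(t)
	repo := TestRepo(t, conn, wrapper)
	org, prj := TestScopes(t, repo)

	// A user in the org; a role and group in the project.
	user := TestUser(t, repo, org.GetPublicId())
	role := TestRole(t, conn, prj.GetPublicId())
	grp := TestGroup(t, conn, prj.GetPublicId())

	// Attach a grant to the role, add the user to the group, then assign the
	// role to the user directly and to the group.
	TestRoleGrant(t, conn, role.PublicId, "id=*;type=host-set;actions=list,create")
	TestGroupMember(t, conn, grp.PublicId, user.PublicId)
	TestUserRole(t, conn, role.PublicId, user.PublicId)
	TestGroupRole(t, conn, role.PublicId, grp.PublicId)

	require.NotEmpty(t, user.PublicId)
}
```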
require.NoError(err) err = rw.Create(context.Background(), r) @@ -315,7 +304,7 @@ func testAccount(t testing.TB, conn *db.DB, scopeId, authMethodId, userId string require.NoError(err) require.Equal(1, count) - id, err := db.NewPublicId(ctx, accountPrefix) + id, err := db.NewPublicId(accountPrefix) require.NoError(err) acct := &authAccount{ @@ -346,15 +335,14 @@ func testAuthMethod(t testing.TB, conn *db.DB, scopeId string) string { authMethodPrefix = "am_" ) t.Helper() - ctx := context.Background() require := require.New(t) require.NotNil(conn) require.NotEmpty(scopeId) - id, err := db.NewPublicId(ctx, authMethodPrefix) + id, err := db.NewPublicId(authMethodPrefix) require.NoError(err) rw := db.New(conn) - _, err = rw.Exec(ctx, insertAuthMethod, []any{id, scopeId}) + _, err = rw.Exec(context.Background(), insertAuthMethod, []any{id, scopeId}) require.NoError(err) return id } diff --git a/internal/iam/user.go b/internal/iam/user.go index 6546d4eb670..b2b6f682fe7 100644 --- a/internal/iam/user.go +++ b/internal/iam/user.go @@ -36,11 +36,11 @@ var ( // NewUser creates a new in memory user and allows options: // WithName - to specify the user's friendly name and WithDescription - to // specify a user description -func NewUser(ctx context.Context, scopeId string, opt ...Option) (*User, error) { +func NewUser(scopeId string, opt ...Option) (*User, error) { const op = "iam.NewUser" opts := getOpts(opt...) if scopeId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing scope id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing scope id") } u := &User{ User: &store.User{ diff --git a/internal/iam/user_test.go b/internal/iam/user_test.go index 734f929e893..7e8f4741e4e 100644 --- a/internal/iam/user_test.go +++ b/internal/iam/user_test.go @@ -19,7 +19,6 @@ import ( func TestNewUser(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) org, _ := TestScopes(t, TestRepo(t, conn, wrapper)) @@ -66,7 +65,7 @@ func TestNewUser(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewUser(ctx, tt.args.orgPublicId, tt.args.opt...) + got, err := NewUser(tt.args.orgPublicId, tt.args.opt...) 
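A small sketch (not part of the patch) of the reverted NewUser constructor above: it only builds the in-memory value and rejects an empty scope id; persistence still goes through the repository or a db writer. The ids and option values are examples.

```go
package iam

import "fmt"

// Illustrative sketch only; not part of this patch.
func newUserSketch() {
	// WithName and WithDescription are the options the constructor documents.
	u, err := NewUser("o_1234567890", WithName("alice"), WithDescription("demo user"))
	fmt.Println(err == nil, u != nil) // expect: true true (nothing is written to the db yet)

	// An empty scope id fails up front with an InvalidParameter error.
	_, err = NewUser("")
	fmt.Println(err != nil) // expect: true
}
```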
if tt.wantErr { require.Error(err) assert.Contains(err.Error(), tt.wantErrMsg) @@ -96,7 +95,6 @@ func Test_UserHardcoded(t *testing.T) { func Test_UserCreate(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) org, _ := TestScopes(t, TestRepo(t, conn, wrapper)) @@ -104,30 +102,30 @@ func Test_UserCreate(t *testing.T) { t.Run("valid-user", func(t *testing.T) { assert, require := assert.New(t), require.New(t) w := db.New(conn) - user, err := NewUser(ctx, org.PublicId) + user, err := NewUser(org.PublicId) require.NoError(err) - id, err := newUserId(ctx) + id, err := newUserId() require.NoError(err) user.PublicId = id - err = w.Create(ctx, user) + err = w.Create(context.Background(), user) require.NoError(err) require.NotEmpty(user.PublicId) foundUser := AllocUser() foundUser.PublicId = user.PublicId - err = w.LookupByPublicId(ctx, &foundUser) + err = w.LookupByPublicId(context.Background(), &foundUser) require.NoError(err) assert.Equal(user, &foundUser) }) t.Run("bad-orgid", func(t *testing.T) { assert, require := assert.New(t), require.New(t) w := db.New(conn) - user, err := NewUser(ctx, id) + user, err := NewUser(id) require.NoError(err) - id, err := newUserId(ctx) + id, err := newUserId() require.NoError(err) user.PublicId = id - err = w.Create(ctx, user) + err = w.Create(context.Background(), user) require.Error(err) assert.Equal("db.Create: dbw.Create: error before write: iam.(User).VetForWrite: iam.validateScopeForWrite: scope is not found: search issue: error #1100", err.Error()) }) @@ -335,7 +333,6 @@ func Test_UserGetScope(t *testing.T) { func TestUser_Clone(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) repo := TestRepo(t, conn, wrapper) @@ -351,20 +348,20 @@ func TestUser_Clone(t *testing.T) { assert := assert.New(t) w := db.New(conn) - user, err := NewUser(ctx, org.PublicId) + user, err := NewUser(org.PublicId) assert.NoError(err) - id, err := newUserId(ctx) + id, err := newUserId() assert.NoError(err) user.PublicId = id - err = w.Create(ctx, user) + err = w.Create(context.Background(), user) assert.NoError(err) - user2, err := NewUser(ctx, org.PublicId) + user2, err := NewUser(org.PublicId) assert.NoError(err) - id, err = newUserId(ctx) + id, err = newUserId() assert.NoError(err) user2.PublicId = id - err = w.Create(ctx, user2) + err = w.Create(context.Background(), user2) assert.NoError(err) cp := user.Clone() diff --git a/internal/oplog/writer.go b/internal/oplog/writer.go index 537c029e3b6..5a8536d31ed 100644 --- a/internal/oplog/writer.go +++ b/internal/oplog/writer.go @@ -40,10 +40,10 @@ func (w *Writer) hasTable(ctx context.Context, tableName string) (bool, error) { func (w *Writer) createTableLike(ctx context.Context, existingTableName string, newTableName string) error { const op = "oplog.(Writer).createTableLike" if existingTableName == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing existing table name") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing existing table name") } if newTableName == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing new table name") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing new table name") } sql := fmt.Sprintf( diff --git a/internal/perms/acl.go b/internal/perms/acl.go index ebbeee79013..94ad3d1ba7a 100644 --- a/internal/perms/acl.go +++ b/internal/perms/acl.go @@ -12,35 +12,10 @@ import ( 
"github.com/hashicorp/boundary/sdk/pbs/controller/api/resources/scopes" ) -// AclGrant is used to decouple API-based grants from those we utilize for ACLs. -// Notably it uses a single ID per grant instead of multiple IDs. -type AclGrant struct { - // The scope ID, which will be a project ID or an org ID - scope Scope - - // The ID to use - id string - - // The type, if provided - typ resource.Type - - // The set of actions being granted - actions actionSet - - // The set of output fields granted - OutputFields *OutputFields -} - -// Actions returns the actions as a slice from the internal map, along with the -// string representations of those actions. -func (a AclGrant) Actions() ([]action.Type, []string) { - return a.actions.Actions() -} - // ACL provides an entry point into the permissions engine for determining if an // action is allowed on a resource based on a principal's (user or group) grants. type ACL struct { - scopeMap map[string][]AclGrant + scopeMap map[string][]Grant } // ACLResults provides a type for the permission's engine results so that we can @@ -52,7 +27,7 @@ type ACLResults struct { OutputFields *OutputFields // This is included but unexported for testing/debugging - scopeMap map[string][]AclGrant + scopeMap map[string][]Grant } // Permission provides information about the specific @@ -90,38 +65,19 @@ type Resource struct { Pin string `json:"pin,omitempty"` } -// NewACL creates an ACL from the grants provided. Note that this converts the -// API-based Grants to AclGrants. +// NewACL creates an ACL from the grants provided. func NewACL(grants ...Grant) ACL { ret := ACL{ - scopeMap: make(map[string][]AclGrant, len(grants)), + scopeMap: make(map[string][]Grant, len(grants)), } for _, grant := range grants { - switch { - case len(grant.ids) > 0: - for _, id := range grant.ids { - ret.scopeMap[grant.scope.Id] = append(ret.scopeMap[grant.scope.Id], aclGrantFromGrant(grant, id)) - } - default: - // This handles the no-ID case as well as the deprecated single-ID case - ret.scopeMap[grant.scope.Id] = append(ret.scopeMap[grant.scope.Id], aclGrantFromGrant(grant, grant.id)) - } + ret.scopeMap[grant.scope.Id] = append(ret.scopeMap[grant.scope.Id], grant) } return ret } -func aclGrantFromGrant(grant Grant, id string) AclGrant { - return AclGrant{ - scope: grant.scope, - id: id, - typ: grant.typ, - actions: grant.actions, - OutputFields: grant.OutputFields, - } -} - // Allowed determines if the grants for an ACL allow an action for a resource. func (a ACL) Allowed(r Resource, aType action.Type, userId string, opt ...Option) (results ACLResults) { opts := getOpts(opt...) 
diff --git a/internal/perms/acl_test.go b/internal/perms/acl_test.go index 890aacc26fe..fd1a5dac865 100644 --- a/internal/perms/acl_test.go +++ b/internal/perms/acl_test.go @@ -4,7 +4,6 @@ package perms import ( - "context" "encoding/json" "fmt" "testing" @@ -26,8 +25,6 @@ type scopeGrant struct { func Test_ACLAllowed(t *testing.T) { t.Parallel() - ctx := context.Background() - type actionAuthorized struct { action action.Type authorized bool @@ -47,8 +44,8 @@ func Test_ACLAllowed(t *testing.T) { { scope: "o_a", grants: []string{ - "ids=ampw_bar,ampw_baz;actions=read,update", - "id=ampw_bop;actions=read:self,update", + "id=ampw_bar;actions=read,update", + "id=ampw_baz;actions=read:self,update", "type=host-catalog;actions=create", "type=target;actions=list", "id=*;type=host-set;actions=list,create", @@ -58,7 +55,7 @@ func Test_ACLAllowed(t *testing.T) { scope: "o_b", grants: []string{ "id=*;type=host-set;actions=list,create", - "ids=hcst_mypin;type=host;actions=*;output_fields=name,description", + "id=hcst_mypin;type=host;actions=*;output_fields=name,description", "id=*;type=*;actions=authenticate", "id=*;type=*;output_fields=id", }, @@ -121,7 +118,7 @@ func Test_ACLAllowed(t *testing.T) { }, }, { - name: "matching scope and id and matching action first id", + name: "matching scope and id and matching action", resource: Resource{ScopeId: "o_a", Id: "ampw_bar"}, scopeGrants: commonGrants, actionsAuthorized: []actionAuthorized{ @@ -130,16 +127,6 @@ func Test_ACLAllowed(t *testing.T) { {action: action.Delete}, }, }, - { - name: "matching scope and id and matching action second id", - resource: Resource{ScopeId: "o_a", Id: "ampw_baz"}, - scopeGrants: commonGrants, - actionsAuthorized: []actionAuthorized{ - {action: action.Read, authorized: true}, - {action: action.Update, authorized: true}, - {action: action.Delete}, - }, - }, { name: "matching scope and type and all action with valid pin", resource: Resource{ScopeId: "o_b", Pin: "hcst_mypin", Type: resource.Host}, @@ -200,7 +187,7 @@ func Test_ACLAllowed(t *testing.T) { }, }, { - name: "matching scope, type, action, random id and bad pin first id", + name: "matching scope, type, action, random id and bad pin", resource: Resource{ScopeId: "o_a", Id: "anything", Type: resource.HostCatalog, Pin: "ampw_bar"}, scopeGrants: commonGrants, actionsAuthorized: []actionAuthorized{ @@ -209,16 +196,6 @@ func Test_ACLAllowed(t *testing.T) { {action: action.Read}, }, }, - { - name: "matching scope, type, action, random id and bad pin second id", - resource: Resource{ScopeId: "o_a", Id: "anything", Type: resource.HostCatalog, Pin: "ampw_baz"}, - scopeGrants: commonGrants, - actionsAuthorized: []actionAuthorized{ - {action: action.Update}, - {action: action.Delete}, - {action: action.Read}, - }, - }, { name: "wrong scope and matching type", resource: Resource{ScopeId: "o_bad", Type: resource.HostSet}, @@ -329,7 +306,7 @@ func Test_ACLAllowed(t *testing.T) { }, }, { - name: "read self with top level read first id", + name: "read self with top level read", resource: Resource{ScopeId: "o_a", Id: "ampw_bar"}, scopeGrants: commonGrants, actionsAuthorized: []actionAuthorized{ @@ -337,18 +314,9 @@ func Test_ACLAllowed(t *testing.T) { {action: action.ReadSelf, authorized: true}, }, }, - { - name: "read self with top level read second id", - resource: Resource{ScopeId: "o_a", Id: "ampw_baz"}, - scopeGrants: commonGrants, - actionsAuthorized: []actionAuthorized{ - {action: action.Read, authorized: true}, - {action: action.ReadSelf, authorized: true}, - }, - }, { name: 
"read self only", - resource: Resource{ScopeId: "o_a", Id: "ampw_bop"}, + resource: Resource{ScopeId: "o_a", Id: "ampw_baz"}, scopeGrants: commonGrants, actionsAuthorized: []actionAuthorized{ {action: action.Read}, @@ -392,7 +360,7 @@ func Test_ACLAllowed(t *testing.T) { var grants []Grant for _, sg := range test.scopeGrants { for _, g := range sg.grants { - grant, err := Parse(ctx, sg.scope, g, WithAccountId(test.accountId), WithUserId(test.userId)) + grant, err := Parse(sg.scope, g, WithAccountId(test.accountId), WithUserId(test.userId)) require.NoError(t, err) grants = append(grants, grant) } @@ -413,10 +381,6 @@ func Test_ACLAllowed(t *testing.T) { } func TestACL_ListPermissions(t *testing.T) { - t.Parallel() - - ctx := context.Background() - tests := []struct { name string userId string @@ -534,7 +498,8 @@ func TestACL_ListPermissions(t *testing.T) { scope: "o_1", grants: []string{ "id=s_1;type=session;actions=list,read", - "ids=s_2,s_3;type=session;actions=list,read", + "id=s_2;type=session;actions=list,read", + "id=s_3;type=session;actions=list,read", }, }, }, @@ -616,7 +581,8 @@ func TestACL_ListPermissions(t *testing.T) { scope: "o_1", grants: []string{ "id=s_1;type=session;actions=list,no-op", - "ids=s_2,s_3;type=session;actions=list,no-op", + "id=s_2;type=session;actions=list,no-op", + "id=s_3;type=session;actions=list,no-op", }, }, }, @@ -732,7 +698,8 @@ func TestACL_ListPermissions(t *testing.T) { scope: "o_1", grants: []string{ "id=s_1;type=session;actions=list,read", - "ids=s_2,s_3;type=session;actions=list,read", + "id=s_2;type=session;actions=list,read", + "id=s_3;type=session;actions=list,read", }, }, { @@ -848,7 +815,7 @@ func TestACL_ListPermissions(t *testing.T) { var grants []Grant for _, sg := range tt.aclGrants { for _, g := range sg.grants { - grant, err := Parse(ctx, sg.scope, g, WithSkipFinalValidation(tt.skipGrantValidationChecking)) + grant, err := Parse(sg.scope, g, WithSkipFinalValidation(tt.skipGrantValidationChecking)) require.NoError(t, err) grants = append(grants, grant) } @@ -880,8 +847,6 @@ func TestJsonMarshal(t *testing.T) { func Test_AnonRestrictions(t *testing.T) { t.Parallel() - ctx := context.Background() - type input struct { name string grant string @@ -893,30 +858,16 @@ func Test_AnonRestrictions(t *testing.T) { name: "id-specific", grant: "id=foobar;actions=%s", }, - { - name: "ids-specific", - grant: "ids=foobar;actions=%s", - }, { name: "wildcard-id", grant: "id=*;type=%s;actions=%s", templatedType: true, shouldHaveSuccess: true, }, - { - name: "wildcard-ids", - grant: "ids=*;type=%s;actions=%s", - templatedType: true, - shouldHaveSuccess: true, - }, { name: "wildcard-id-and-type", grant: "id=*;type=*;actions=%s", }, - { - name: "wildcard-ids-and-type", - grant: "ids=*;type=*;actions=%s", - }, { name: "no-id", grant: "type=%s;actions=%s", @@ -932,19 +883,10 @@ func Test_AnonRestrictions(t *testing.T) { if i == resource.Controller || i == resource.Worker { continue } - for j := action.Type(1); j <= action.Download; j++ { - id := "foobar" - prefixes := globals.ResourcePrefixesFromType(resource.Type(i)) - if len(prefixes) > 0 { - id = fmt.Sprintf("%s_%s", prefixes[0], id) - // If it's global scope, correct it - if id == "global_foobar" { - id = "global" - } - } + for j := action.Type(1); j <= action.ReadCertificateAuthority; j++ { res := Resource{ ScopeId: scope.Global.String(), - Id: id, + Id: "foobar", Type: resource.Type(i), } grant := test.grant @@ -954,7 +896,7 @@ func Test_AnonRestrictions(t *testing.T) { grant = fmt.Sprintf(grant, 
action.Type(j).String()) } - parsedGrant, err := Parse(ctx, scope.Global.String(), grant, WithSkipFinalValidation(true)) + parsedGrant, err := Parse(scope.Global.String(), grant, WithSkipFinalValidation(true)) require.NoError(err) acl := NewACL(parsedGrant) diff --git a/internal/perms/grants.go b/internal/perms/grants.go index fc45b03f1ed..8d472625ab9 100644 --- a/internal/perms/grants.go +++ b/internal/perms/grants.go @@ -4,7 +4,6 @@ package perms import ( - "context" "encoding/json" "fmt" "sort" @@ -16,27 +15,8 @@ import ( "github.com/hashicorp/boundary/internal/types/action" "github.com/hashicorp/boundary/internal/types/resource" "github.com/hashicorp/boundary/internal/types/scope" - "golang.org/x/exp/slices" ) -type actionSet map[action.Type]bool - -// Actions is a helper that goes through the map and returns both the actual -// types of actions as a slice and the equivalent strings -func (a actionSet) Actions() (typs []action.Type, strs []string) { - typs = make([]action.Type, 0, len(a)) - strs = make([]string, 0, len(a)) - for k, v := range a { - // Nothing should be in there if not true, but doesn't hurt to validate - if !v { - continue - } - typs = append(typs, k) - strs = append(strs, k.String()) - } - return -} - // GrantTuple is simply a struct that can be reference from other code to return // a set of scopes and grants to parse type GrantTuple struct { @@ -57,20 +37,17 @@ type Scope struct { // Grant is a Go representation of a parsed grant type Grant struct { - // The scope, containing the ID and type + // The scope ID, which will be a project ID or an org ID scope Scope - // The ID of the grant, if provided. Deprecated in favor of ids. + // The ID in the grant, if provided. id string - // The IDs in the grant, if provided - ids []string - // The type, if provided typ resource.Type // The set of actions being granted - actions actionSet + actions map[action.Type]bool // The set of output fields granted OutputFields *OutputFields @@ -80,25 +57,26 @@ type Grant struct { actionsBeingParsed []string } -// Id returns the ID the grant refers to, if any func (g Grant) Id() string { return g.id } -// Ids returns the IDs the grant refers to, if any -func (g Grant) Ids() []string { - return g.ids -} - -// Type returns the type the grant refers to, or Unknown func (g Grant) Type() resource.Type { return g.typ } -// Actions returns the actions as a slice from the internal map, along with the -// string representations of those actions. -func (g Grant) Actions() ([]action.Type, []string) { - return g.actions.Actions() +func (g Grant) Actions() (typs []action.Type, strs []string) { + typs = make([]action.Type, 0, len(g.actions)) + strs = make([]string, 0, len(g.actions)) + for k, v := range g.actions { + // Nothing should be in there if not true, but doesn't hurt to validate + if !v { + continue + } + typs = append(typs, k) + strs = append(strs, k.String()) + } + return } // hasActionOrSubaction checks whether a grant's action set contains the given @@ -119,13 +97,8 @@ func (g Grant) clone() *Grant { ret := &Grant{ scope: g.scope, id: g.id, - ids: g.ids, typ: g.typ, } - if g.ids != nil { - ret.ids = make([]string, len(g.ids)) - copy(ret.ids, g.ids) - } if g.actionsBeingParsed != nil { ret.actionsBeingParsed = append(ret.actionsBeingParsed, g.actionsBeingParsed...) 
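A short sketch (not part of the patch) of the accessors that remain on Grant after this change; the grant string and scope id are examples.

```go
package perms

import "fmt"

// Illustrative sketch only; not part of this patch.
func grantAccessorsSketch() {
	g, err := Parse("o_1234", "id=hcst_1234567890;actions=read,update")
	if err != nil {
		panic(err)
	}

	fmt.Println(g.Id())   // the single id carried by the grant
	fmt.Println(g.Type()) // resource.Unknown here, since no type= segment was given

	// Actions returns the typed actions and their string forms together.
	typs, strs := g.Actions()
	fmt.Println(len(typs), strs) // expect 2 actions: read and update (map order, so unsorted)
}
```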
} @@ -153,10 +126,6 @@ func (g Grant) CanonicalString() string { builder = append(builder, fmt.Sprintf("id=%s", g.id)) } - if len(g.ids) > 0 { - builder = append(builder, fmt.Sprintf("ids=%s", strings.Join(g.ids, ","))) - } - if g.typ != resource.Unknown { builder = append(builder, fmt.Sprintf("type=%s", g.typ.String())) } @@ -178,15 +147,12 @@ func (g Grant) CanonicalString() string { } // MarshalJSON provides a custom marshaller for grants -func (g Grant) MarshalJSON(ctx context.Context) ([]byte, error) { +func (g Grant) MarshalJSON() ([]byte, error) { const op = "perms.(Grant).MarshalJSON" res := make(map[string]any, 4) if g.id != "" { res["id"] = g.id } - if len(g.ids) > 0 { - res["ids"] = g.ids - } if g.typ != resource.Unknown { res["type"] = g.typ.String() } @@ -203,7 +169,7 @@ func (g Grant) MarshalJSON(ctx context.Context) ([]byte, error) { } b, err := json.Marshal(res) if err != nil { - return nil, errors.Wrap(ctx, err, op, errors.WithCode(errors.Encode)) + return nil, errors.WrapDeprecated(err, op, errors.WithCode(errors.Encode)) } return b, nil } @@ -211,47 +177,33 @@ func (g Grant) MarshalJSON(ctx context.Context) ([]byte, error) { // This is purposefully unexported since the values being set here are not being // checked for validity. This should only be called by the main parsing function // when JSON is detected. -func (g *Grant) unmarshalJSON(ctx context.Context, data []byte) error { +func (g *Grant) unmarshalJSON(data []byte) error { const op = "perms.(Grant).unmarshalJSON" raw := make(map[string]any, 4) if err := json.Unmarshal(data, &raw); err != nil { - return errors.Wrap(ctx, err, op, errors.WithCode(errors.Decode)) + return errors.WrapDeprecated(err, op, errors.WithCode(errors.Decode)) } if rawId, ok := raw["id"]; ok { id, ok := rawId.(string) if !ok { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q as string", "id")) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q as string", "id")) } g.id = id } - if rawIds, ok := raw["ids"]; ok { - ids, ok := rawIds.([]any) - if !ok { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q as array", "ids")) - } - g.ids = make([]string, len(ids)) - for i, id := range ids { - idStr, ok := id.(string) - if !ok { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q element %q as string", "ids", id)) - } - g.ids[i] = idStr - } - } if rawType, ok := raw["type"]; ok { typ, ok := rawType.(string) if !ok { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q as string", "type")) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q as string", "type")) } g.typ = resource.Map[typ] if g.typ == resource.Unknown { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unknown type specifier %q", typ)) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unknown type specifier %q", typ)) } } if rawActions, ok := raw["actions"]; ok { interfaceActions, ok := rawActions.([]any) if !ok { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q as array", "actions")) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q as array", "actions")) } if len(interfaceActions) > 0 { g.actionsBeingParsed = make([]string, 0, len(interfaceActions)) @@ -259,9 +211,9 @@ func (g *Grant) unmarshalJSON(ctx context.Context, data 
[]byte) error { actionStr, ok := v.(string) switch { case !ok: - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %v in actions array as string", v)) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %v in actions array as string", v)) case actionStr == "": - return errors.New(ctx, errors.InvalidParameter, op, "empty action found") + return errors.NewDeprecated(errors.InvalidParameter, op, "empty action found") default: g.actionsBeingParsed = append(g.actionsBeingParsed, strings.ToLower(actionStr)) } @@ -271,7 +223,7 @@ func (g *Grant) unmarshalJSON(ctx context.Context, data []byte) error { if rawOutputFields, ok := raw["output_fields"]; ok { interfaceOutputFields, ok := rawOutputFields.([]any) if !ok { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q as array", "output_fields")) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %q as array", "output_fields")) } // We do the make here because we detect later if the field was set but // no values given @@ -285,7 +237,7 @@ func (g *Grant) unmarshalJSON(ctx context.Context, data []byte) error { field, ok := v.(string) switch { case !ok: - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %v in output_fields array as string", v)) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unable to interpret %v in output_fields array as string", v)) default: fields = append(fields, field) } @@ -296,7 +248,7 @@ func (g *Grant) unmarshalJSON(ctx context.Context, data []byte) error { return nil } -func (g *Grant) unmarshalText(ctx context.Context, grantString string) error { +func (g *Grant) unmarshalText(grantString string) error { const op = "perms.(Grant).unmarshalText" segments := strings.Split(grantString, ";") for _, segment := range segments { @@ -305,25 +257,22 @@ func (g *Grant) unmarshalText(ctx context.Context, grantString string) error { // Ensure we don't accept "foo=bar=baz", "=foo", or "foo=" switch { case len(kv) != 2: - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("segment %q not formatted correctly, wrong number of equal signs", segment)) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("segment %q not formatted correctly, wrong number of equal signs", segment)) case len(kv[0]) == 0: - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("segment %q not formatted correctly, missing key", segment)) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("segment %q not formatted correctly, missing key", segment)) case len(kv[1]) == 0 && kv[0] != "output_fields": - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("segment %q not formatted correctly, missing value", segment)) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("segment %q not formatted correctly, missing value", segment)) } switch kv[0] { case "id": g.id = kv[1] - case "ids": - g.ids = strings.Split(kv[1], ",") - case "type": typeString := strings.ToLower(kv[1]) g.typ = resource.Map[typeString] if g.typ == resource.Unknown { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unknown type specifier %q", typeString)) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unknown type specifier %q", typeString)) } case "actions": @@ -332,7 +281,7 @@ func (g *Grant) unmarshalText(ctx context.Context, grantString string) error { 
g.actionsBeingParsed = make([]string, 0, len(actions)) for _, action := range actions { if action == "" { - return errors.New(ctx, errors.InvalidParameter, op, "empty action found") + return errors.NewDeprecated(errors.InvalidParameter, op, "empty action found") } g.actionsBeingParsed = append(g.actionsBeingParsed, strings.ToLower(action)) } @@ -358,13 +307,13 @@ func (g *Grant) unmarshalText(ctx context.Context, grantString string) error { // // The scope must be the org and project where this grant originated, not the // request. -func Parse(ctx context.Context, scopeId, grantString string, opt ...Option) (Grant, error) { +func Parse(scopeId, grantString string, opt ...Option) (Grant, error) { const op = "perms.Parse" if len(grantString) == 0 { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, "missing grant string") + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing grant string") } if scopeId == "" { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, "missing scope id") + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, "missing scope id") } grantString = strings.ToValidUTF8(grantString, string(unicode.ReplacementChar)) @@ -379,234 +328,169 @@ func Parse(ctx context.Context, scopeId, grantString string, opt ...Option) (Gra case strings.HasPrefix(scopeId, scope.Project.Prefix()): grant.scope.Type = scope.Project default: - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, "invalid scope type") + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, "invalid scope type") } switch { case grantString[0] == '{': - if err := grant.unmarshalJSON(ctx, []byte(grantString)); err != nil { - return Grant{}, errors.Wrap(ctx, err, op, errors.WithMsg("unable to parse JSON grant string")) + if err := grant.unmarshalJSON([]byte(grantString)); err != nil { + return Grant{}, errors.WrapDeprecated(err, op, errors.WithMsg("unable to parse JSON grant string")) } default: - if err := grant.unmarshalText(ctx, grantString); err != nil { - return Grant{}, errors.Wrap(ctx, err, op, errors.WithMsg("unable to parse grant string")) + if err := grant.unmarshalText(grantString); err != nil { + return Grant{}, errors.WrapDeprecated(err, op, errors.WithMsg("unable to parse grant string")) } } - if grant.id != "" && len(grant.ids) > 0 { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("input grant string %q contains both %q and %q fields", grantString, "id", "ids")) - } - if len(grant.ids) > 1 && slices.Contains(grant.ids, "*") { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("input grant string %q contains both wildcard and non-wildcard values in %q field", grantString, "ids")) - } - opts := getOpts(opt...) - var grantIds []string - var deprecatedId bool - switch { - case grant.id != "": - grantIds = []string{grant.id} - deprecatedId = true - case len(grant.ids) > 0: - grantIds = grant.ids - // Ensure we aren't seeing mixed types. We will have already filtered - // out the wildcard case above. - if len(grant.ids) > 1 { - var seenType resource.Type - for i, id := range grantIds { - if i == 0 { - seenType = globals.ResourceTypeFromPrefix(id) - continue - } - if seenType != globals.ResourceTypeFromPrefix(id) { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("input grant string %q contains ids of differently-typed resources", grantString)) - } + // Check for templated values ID, and substitute in with the authenticated + // values if so. 
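As a quick reference for the two input formats Parse dispatches on, a sketch (not part of the patch); the id and scope are examples, and the JSON key order in the final comment is only approximate.

```go
package perms

import "fmt"

// Illustrative sketch only; not part of this patch.
func grantFormsSketch() {
	text, err := Parse("o_1234", "id=hcst_1234567890;actions=read")
	if err != nil {
		panic(err)
	}
	jsonForm, err := Parse("o_1234", `{"id":"hcst_1234567890","actions":["read"]}`)
	if err != nil {
		panic(err)
	}

	// Both forms should canonicalize identically.
	fmt.Println(text.CanonicalString())                               // expect id=hcst_1234567890;actions=read
	fmt.Println(text.CanonicalString() == jsonForm.CanonicalString()) // expect true

	// MarshalJSON re-emits the JSON object form.
	b, err := text.MarshalJSON()
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // e.g. {"actions":["read"],"id":"hcst_1234567890"}
}
```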
If we are using a dummy user or account ID, store the + // original ID and return it at the end; this is usually the case when + // validating grant formats. + var origId string + if grant.id != "" && strings.HasPrefix(grant.id, "{{") { + id := strings.TrimSuffix(strings.TrimPrefix(grant.id, "{{"), "}}") + id = strings.TrimSpace(id) + switch id { + case "user.id", ".User.Id": + if opts.withUserId != "" { + grant.id = strings.ToValidUTF8(opts.withUserId, string(unicode.ReplacementChar)) } - } - } - // It's possible that there is no id in a grant. In that case we still need - // to validate it and build up the parsed Grant. We insert an empty ID value - // in this case; the code below will check that it's non-empty before - // running any ID-specific logic on it. - if len(grantIds) == 0 { - grantIds = []string{""} - } - for i, currId := range grantIds { - // Check for templated values ID, and substitute in with the authenticated - // values if so. If we are using a dummy user or account ID, store the - // original ID and return it at the end; this is usually the case when - // validating grant formats. - var origId string - if currId != "" { - if strings.HasPrefix(currId, "{{") { - id := strings.TrimSuffix(strings.TrimPrefix(currId, "{{"), "}}") - id = strings.TrimSpace(id) - switch id { - case "user.id", ".User.Id": - if opts.withUserId != "" { - grantIds[i] = strings.ToValidUTF8(opts.withUserId, string(unicode.ReplacementChar)) - } else { - // Otherwise, substitute in a dummy value - origId = currId - grantIds[i] = "u_dummy" - } - case "account.id", ".Account.Id": - if opts.withAccountId != "" { - grantIds[i] = strings.ToValidUTF8(opts.withAccountId, string(unicode.ReplacementChar)) - } else { - origId = currId - grantIds[i] = "acctoidc_dummy" - } - default: - fieldName := "ids" - if deprecatedId { - fieldName = "id" - } - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unknown template %q in grant %q value", currId, fieldName)) - } + // Otherwise, substitute in a dummy value + origId = grant.id + grant.id = "u_dummy" + case "account.id", ".Account.Id": + if opts.withAccountId != "" { + grant.id = strings.ToValidUTF8(opts.withAccountId, string(unicode.ReplacementChar)) } + origId = grant.id + grant.id = "acctoidc_dummy" + default: + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unknown template %q in grant %q value", grant.id, "id")) } + } + + if err := grant.validateType(); err != nil { + return Grant{}, errors.WrapDeprecated(err, op) + } + + if err := grant.parseAndValidateActions(); err != nil { + return Grant{}, errors.WrapDeprecated(err, op) + } - // We don't need to do these twice as they don't depend on IDs; they - // also clear state such as actionsBeingParsed - if i == 0 { - if err := grant.validateType(ctx); err != nil { - return Grant{}, errors.Wrap(ctx, err, op) + if !opts.withSkipFinalValidation { + switch { + case grant.id == "*": + // Matches + // id=*;type=sometype;actions=foo,bar + // or + // id=*;type=*;actions=foo,bar + // This can be a non-unknown type or wildcard + if grant.typ == resource.Unknown { + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains wildcard id and no specified type", grant.CanonicalString())) } - if err := grant.parseAndValidateActions(ctx); err != nil { - return Grant{}, errors.Wrap(ctx, err, op) + case grant.id != "": + // Non-wildcard but specified ID. 
This can match + // id=foo_bar;actions=foo,bar + // or + // id=foo_bar;type=sometype;actions=foo,bar + // or + // id=foo_bar;type=*;actions=foo,bar + // but notably the specified types have to actually make sense: in + // the second example the type corresponding to the ID must have the + // specified type as a child type; in the third the ID must be a + // type that has child types. + idType := globals.ResourceTypeFromPrefix(grant.id) + if idType == resource.Unknown { + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains an id %q of an unknown resource type", grant.CanonicalString(), grant.id)) } - } - - if !opts.withSkipFinalValidation { - switch { - case grantIds[i] == "*": - // Matches - // id=*;type=sometype;actions=foo,bar - // or - // id=*;type=*;actions=foo,bar - // This can be a non-unknown type or wildcard - if grant.typ == resource.Unknown { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains wildcard id and no specified type", grant.CanonicalString())) + switch grant.typ { + case resource.Unknown: + // This is fine as-is but we do not support collection actions + // without a type (either directly specified or wildcard) so + // check that + if grant.actions[action.Create] || + grant.actions[action.List] { + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains create or list action in a format that does not allow these", grant.CanonicalString())) + } + case resource.All: + // Verify that the ID is a type that has child types + if !resource.HasChildTypes(idType) { + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains an id that does not support child types", grant.CanonicalString())) } - case grantIds[i] != "": - // Non-wildcard but specified ID. This can match - // id=foo_bar;actions=foo,bar - // or - // id=foo_bar;type=sometype;actions=foo,bar - // or - // id=foo_bar;type=*;actions=foo,bar - // but notably the specified types have to actually make sense: in - // the second example the type corresponding to the ID must have the - // specified type as a child type; in the third the ID must be a - // type that has child types. 
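The template handling shown above is easiest to see with the principal-scoped grants Boundary issues for authenticated users; a hedged sketch (not part of the patch), with example user and account ids.

```go
package perms

import "fmt"

// Illustrative sketch only; not part of this patch.
func templatedGrantSketch() {
	// With a real user id supplied via options, {{.User.Id}} (or {{user.id}})
	// is substituted before validation, so the grant targets that user.
	g, err := Parse("o_1234", "id={{.User.Id}};actions=read:self", WithUserId("u_1234567890"))
	if err != nil {
		panic(err)
	}
	fmt.Println(g.Id()) // expect u_1234567890

	// {{.Account.Id}} works the same way with WithAccountId.
	g, err = Parse("o_1234", "id={{.Account.Id}};actions=read", WithAccountId("acctpw_1234567890"))
	if err != nil {
		panic(err)
	}
	fmt.Println(g.Id()) // expect acctpw_1234567890

	// With neither option set, a dummy principal id is used for validation and
	// the original template string is restored on the returned grant.
}
```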
- idType := globals.ResourceTypeFromPrefix(grantIds[i]) - if idType == resource.Unknown { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains an id %q of an unknown resource type", grant.CanonicalString(), grantIds[i])) + default: + // Specified resource type, verify it's a child + if resource.Parent(grant.typ) != idType { + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains type %s that is not a child type of the type (%s) of the specified id", grant.CanonicalString(), grant.typ.String(), idType.String())) } - switch grant.typ { - case resource.Unknown: - // This is fine as-is but we do not support collection actions - // without a type (either directly specified or wildcard) so - // check that - if grant.actions[action.Create] || - grant.actions[action.List] { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains create or list action in a format that does not allow these", grant.CanonicalString())) + } + default: // no specified id + switch grant.typ { + case resource.Unknown: + // Error -- no ID or type isn't valid (although we should never + // get to this point because original parsing should error) + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains no id or type", grant.CanonicalString())) + case resource.All: + // "type=*;actions=..." is not supported -- we require you to + // explicitly set a pin or set the ID to * + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains wildcard type with no id value", grant.CanonicalString())) + default: + // Here we have type=something,actions=. This + // means we're operating on collections and support only create + // or list. 
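For the id-plus-type branch above, a hedged sketch (not part of the patch) of the child-type rule; the ids are examples, and the failing case assumes target is not a child type of host-catalog.

```go
package perms

import "fmt"

// Illustrative sketch only; not part of this patch.
func idTypePairingSketch() {
	// hcst_ is a static host catalog prefix and host is a child type of
	// host-catalog, so this pins the catalog and grants read on its hosts.
	_, err := Parse("o_1234", "id=hcst_1234567890;type=host;actions=read")
	fmt.Println(err == nil) // expect true

	// target is not a child type of host-catalog, so this should be rejected.
	_, err = Parse("o_1234", "id=hcst_1234567890;type=target;actions=read")
	fmt.Println(err != nil) // expect true
}
```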
Note that wildcard actions are not okay here; that + // uses the format id=*;type=;actions=* + switch len(grant.actions) { + case 0: + // It's okay to have no actions if only output fields are being defined + if _, hasSetFields := grant.OutputFields.Fields(); !hasSetFields { + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains no actions or output fields", grant.CanonicalString())) } - case resource.All: - // Verify that the ID is a type that has child types - if !resource.HasChildTypes(idType) { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains an id that does not support child types", grant.CanonicalString())) + case 1: + if !grant.hasActionOrSubaction(action.Create) && + !grant.hasActionOrSubaction(action.List) { + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains non-create or non-list action in a format that only allows these", grant.CanonicalString())) } - default: - // Specified resource type, verify it's a child - if resource.Parent(grant.typ) != idType { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains type %s that is not a child type of the type (%s) of the specified id", grant.CanonicalString(), grant.typ.String(), idType.String())) + case 2: + if !grant.hasActionOrSubaction(action.Create) || !grant.hasActionOrSubaction(action.List) { + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains non-create or non-list action in a format that only allows these", grant.CanonicalString())) } - } - default: // no specified id - switch grant.typ { - case resource.Unknown: - // Error -- no ID or type isn't valid (although we should never - // get to this point because original parsing should error) - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains no id or type", grant.CanonicalString())) - case resource.All: - // "type=*;actions=..." is not supported -- we require you to - // explicitly set a pin or set the ID to * - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains wildcard type with no id value", grant.CanonicalString())) default: - // Here we have type=something,actions=. This - // means we're operating on collections and support only create - // or list. 
Note that wildcard actions are not okay here; that - // uses the format id=*;type=;actions=* - switch len(grant.actions) { - case 0: - // It's okay to have no actions if only output fields are being defined - if _, hasSetFields := grant.OutputFields.Fields(); !hasSetFields { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains no actions or output fields", grant.CanonicalString())) - } - case 1: - if !grant.hasActionOrSubaction(action.Create) && - !grant.hasActionOrSubaction(action.List) { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains non-create or non-list action in a format that only allows these", grant.CanonicalString())) - } - case 2: - if !grant.hasActionOrSubaction(action.Create) || !grant.hasActionOrSubaction(action.List) { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains non-create or non-list action in a format that only allows these", grant.CanonicalString())) - } - default: - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains non-create or non-list action in a format that only allows these", grant.CanonicalString())) - } + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q contains non-create or non-list action in a format that only allows these", grant.CanonicalString())) } } - // This might be zero if output fields is populated - if len(grant.actions) > 0 { - // Create a dummy resource and pass it through Allowed and - // ensure that we get allowed. We need to use the templated - // grant, if any, so we send in a clone with an updated ID. - grantForValidation := grant.clone() - grantForValidation.id = grantIds[i] - acl := NewACL(*grantForValidation) - r := Resource{ - ScopeId: scopeId, - Id: grantIds[i], - Type: grant.typ, - } - if !resource.TopLevelType(grant.typ) { - r.Pin = grantIds[i] - } - var allowed bool - for k := range grant.actions { - results := acl.Allowed(r, k, globals.AnonymousUserId, WithSkipAnonymousUserRestrictions(true)) - if results.Authorized { - allowed = true - break - } - } - if !allowed { - return Grant{}, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q would not result in any action being authorized", grant.CanonicalString())) + } + // This might be zero if output fields is populated + if len(grant.actions) > 0 { + // Create a dummy resource and pass it through Allowed and ensure that + // we get allowed. + acl := NewACL(grant) + r := Resource{ + ScopeId: scopeId, + Id: grant.id, + Type: grant.typ, + } + if !resource.TopLevelType(grant.typ) { + r.Pin = grant.id + } + var allowed bool + for k := range grant.actions { + results := acl.Allowed(r, k, globals.AnonymousUserId, WithSkipAnonymousUserRestrictions(true)) + if results.Authorized { + allowed = true + break } } - } - - // If we substituted in a dummy value, replace with the original now - if origId != "" { - grantIds[i] = origId + if !allowed { + return Grant{}, errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("parsed grant string %q would not result in any action being authorized", grant.CanonicalString())) + } } } - // See if we need to move grantIds back for the deprecated case. grantIds - // will always be at least size 1 since we add the empty string if no IDs - // were provided, so we can check to see if that was the case first. 
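Pulling the validation rules above together, a sketch (not part of the patch) that runs a handful of grant strings through Parse; the expected outcomes are noted inline and mirror the rules and tests in this diff.

```go
package perms

import "fmt"

// Illustrative sketch only; not part of this patch.
func grantValidationSketch() {
	cases := []struct {
		grant string
		valid bool
	}{
		{"id=*;type=host-set;actions=list,create", true},     // wildcard id with a concrete type
		{"id=*;actions=read", false},                         // wildcard id needs a type
		{"type=*;actions=read", false},                       // wildcard type needs id=*
		{"type=host-catalog;actions=create,list", true},      // collection grant: create/list only
		{"type=host-catalog;actions=read", false},            // read is not a collection action
		{"id=u_1234567890;actions=create", false},            // create is not valid against a specific id
		{"id=*;type=*;actions=*,read", false},                // "*" cannot be combined with other actions
		{"id=*;type=host-catalog;actions=createread", false}, // unknown action
	}
	for _, c := range cases {
		_, err := Parse("o_1234", c.grant)
		fmt.Printf("%-50s parsed=%v expected=%v\n", c.grant, err == nil, c.valid)
	}
}
```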
- switch { - case grantIds[0] == "": - // Nothing to do - case deprecatedId: - grant.id = grantIds[0] - default: - grant.ids = grantIds + // If we substituted in a dummy value, replace with the original now + if origId != "" { + grant.id = origId } return grant, nil @@ -616,16 +500,16 @@ func Parse(ctx context.Context, scopeId, grantString string, opt ...Option) (Gra // types. It does not explicitly check the resource string itself; that's the // job of the parsing functions to look up the string from the Map and ensure // it's not unknown. -func (g Grant) validateType(ctx context.Context) error { +func (g Grant) validateType() error { const op = "perms.(Grant).validateType" switch g.typ { case resource.Controller: - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unknown type specifier %q", g.typ)) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unknown type specifier %q", g.typ)) } return nil } -func (g *Grant) parseAndValidateActions(ctx context.Context) error { +func (g *Grant) parseAndValidateActions() error { const op = "perms.(Grant).parseAndValidateActions" if len(g.actionsBeingParsed) == 0 { g.actionsBeingParsed = nil @@ -634,25 +518,25 @@ func (g *Grant) parseAndValidateActions(ctx context.Context) error { if _, hasSetFields := g.OutputFields.Fields(); hasSetFields { return nil } - return errors.New(ctx, errors.InvalidParameter, op, "missing actions") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing actions") } for _, a := range g.actionsBeingParsed { if a == "" { - return errors.New(ctx, errors.InvalidParameter, op, "empty action found") + return errors.NewDeprecated(errors.InvalidParameter, op, "empty action found") } if g.actions == nil { g.actions = make(map[action.Type]bool, len(g.actionsBeingParsed)) } if am := action.Map[a]; am == action.Unknown { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unknown action %q", a)) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("unknown action %q", a)) } else { g.actions[am] = true } } if len(g.actions) > 1 && g.actions[action.All] { - return errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("%q cannot be specified with other actions", action.All.String())) + return errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("%q cannot be specified with other actions", action.All.String())) } g.actionsBeingParsed = nil diff --git a/internal/perms/grants_test.go b/internal/perms/grants_test.go index 9007fc2d528..bd41146881e 100644 --- a/internal/perms/grants_test.go +++ b/internal/perms/grants_test.go @@ -4,7 +4,6 @@ package perms import ( - "context" "fmt" "testing" @@ -19,8 +18,6 @@ import ( func Test_ActionParsingValidation(t *testing.T) { t.Parallel() - ctx := context.Background() - type input struct { name string input Grant @@ -103,7 +100,7 @@ func Test_ActionParsingValidation(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - err := test.input.parseAndValidateActions(ctx) + err := test.input.parseAndValidateActions() if test.errResult == "" { require.NoError(t, err) assert.Equal(t, test.result, test.input) @@ -117,14 +114,13 @@ func Test_ActionParsingValidation(t *testing.T) { func Test_ValidateType(t *testing.T) { t.Parallel() - ctx := context.Background() var g Grant - for i := resource.Unknown; i <= resource.StorageBucket; i++ { + for i := resource.Unknown; i <= resource.Credential; i++ { g.typ = i if i == resource.Controller { - assert.Error(t, g.validateType(ctx)) + assert.Error(t, 
g.validateType()) } else { - assert.NoError(t, g.validateType(ctx)) + assert.NoError(t, g.validateType()) } } } @@ -132,8 +128,6 @@ func Test_ValidateType(t *testing.T) { func Test_MarshalingAndCloning(t *testing.T) { t.Parallel() - ctx := context.Background() - type input struct { name string input Grant @@ -165,31 +159,7 @@ func Test_MarshalingAndCloning(t *testing.T) { canonicalString: `id=baz;type=group`, }, { - name: "type and ids", - input: Grant{ - ids: []string{"baz", "bop"}, - scope: Scope{ - Type: scope.Project, - }, - typ: resource.Group, - }, - jsonOutput: `{"ids":["baz","bop"],"type":"group"}`, - canonicalString: `ids=baz,bop;type=group`, - }, - { - name: "type and ids single id", - input: Grant{ - ids: []string{"baz"}, - scope: Scope{ - Type: scope.Project, - }, - typ: resource.Group, - }, - jsonOutput: `{"ids":["baz"],"type":"group"}`, - canonicalString: `ids=baz;type=group`, - }, - { - name: "output fields id", + name: "output fields", input: Grant{ id: "baz", scope: Scope{ @@ -208,26 +178,7 @@ func Test_MarshalingAndCloning(t *testing.T) { canonicalString: `id=baz;type=group;output_fields=id,name,version`, }, { - name: "output fields ids", - input: Grant{ - ids: []string{"baz", "bop"}, - scope: Scope{ - Type: scope.Project, - }, - typ: resource.Group, - OutputFields: &OutputFields{ - fields: map[string]bool{ - "name": true, - "version": true, - "id": true, - }, - }, - }, - jsonOutput: `{"ids":["baz","bop"],"output_fields":["id","name","version"],"type":"group"}`, - canonicalString: `ids=baz,bop;type=group;output_fields=id,name,version`, - }, - { - name: "everything id", + name: "everything", input: Grant{ id: "baz", scope: Scope{ @@ -250,35 +201,11 @@ func Test_MarshalingAndCloning(t *testing.T) { jsonOutput: `{"actions":["create","read"],"id":"baz","output_fields":["id","name","version"],"type":"group"}`, canonicalString: `id=baz;type=group;actions=create,read;output_fields=id,name,version`, }, - { - name: "everything ids", - input: Grant{ - ids: []string{"baz", "bop"}, - scope: Scope{ - Type: scope.Project, - }, - typ: resource.Group, - actions: map[action.Type]bool{ - action.Create: true, - action.Read: true, - }, - actionsBeingParsed: []string{"create", "read"}, - OutputFields: &OutputFields{ - fields: map[string]bool{ - "name": true, - "version": true, - "ids": true, - }, - }, - }, - jsonOutput: `{"actions":["create","read"],"ids":["baz","bop"],"output_fields":["ids","name","version"],"type":"group"}`, - canonicalString: `ids=baz,bop;type=group;actions=create,read;output_fields=ids,name,version`, - }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - output, err := test.input.MarshalJSON(ctx) + output, err := test.input.MarshalJSON() require.NoError(t, err) assert.Equal(t, test.jsonOutput, string(output)) assert.Equal(t, test.canonicalString, test.input.CanonicalString()) @@ -290,8 +217,6 @@ func Test_MarshalingAndCloning(t *testing.T) { func Test_Unmarshaling(t *testing.T) { t.Parallel() - ctx := context.Background() - type input struct { name string jsonInput string @@ -315,19 +240,12 @@ func Test_Unmarshaling(t *testing.T) { jsonErr: "perms.(Grant).unmarshalJSON: error occurred during decode, encoding issue: error #303: invalid character 'w' looking for beginning of value", }, { - name: "bad segment id", + name: "bad segment", jsonInput: `{"id":true}`, jsonErr: `perms.(Grant).unmarshalJSON: unable to interpret "id" as string: parameter violation: error #100`, textInput: `id=`, textErr: `perms.(Grant).unmarshalText: segment "id=" not formatted 
correctly, missing value: parameter violation: error #100`, }, - { - name: "bad segment ids", - jsonInput: `{"ids":true}`, - jsonErr: `perms.(Grant).unmarshalJSON: unable to interpret "ids" as array: parameter violation: error #100`, - textInput: `ids=`, - textErr: `perms.(Grant).unmarshalText: segment "ids=" not formatted correctly, missing value: parameter violation: error #100`, - }, { name: "good id", expected: Grant{ @@ -336,14 +254,6 @@ func Test_Unmarshaling(t *testing.T) { jsonInput: `{"id":"foobar"}`, textInput: `id=foobar`, }, - { - name: "good ids", - expected: Grant{ - ids: []string{"foobar"}, - }, - jsonInput: `{"ids":["foobar"]}`, - textInput: `ids=foobar`, - }, { name: "bad id", jsonInput: `{"id":true}`, @@ -351,13 +261,6 @@ func Test_Unmarshaling(t *testing.T) { textInput: `=id`, textErr: `perms.(Grant).unmarshalText: segment "=id" not formatted correctly, missing key: parameter violation: error #100`, }, - { - name: "bad ids", - jsonInput: `{"ids":true}`, - jsonErr: `perms.(Grant).unmarshalJSON: unable to interpret "ids" as array: parameter violation: error #100`, - textInput: `=ids`, - textErr: `perms.(Grant).unmarshalText: segment "=ids" not formatted correctly, missing key: parameter violation: error #100`, - }, { name: "good type", expected: Grant{ @@ -374,7 +277,7 @@ func Test_Unmarshaling(t *testing.T) { textErr: `perms.(Grant).unmarshalText: segment "type=host-catalog=id" not formatted correctly, wrong number of equal signs: parameter violation: error #100`, }, { - name: "good output fields id", + name: "good output fields", expected: Grant{ OutputFields: &OutputFields{ fields: map[string]bool{ @@ -388,33 +291,12 @@ func Test_Unmarshaling(t *testing.T) { textInput: `output_fields=id,version,name`, }, { - name: "good output fields ids", - expected: Grant{ - OutputFields: &OutputFields{ - fields: map[string]bool{ - "name": true, - "version": true, - "ids": true, - }, - }, - }, - jsonInput: `{"output_fields":["ids","name","version"]}`, - textInput: `output_fields=ids,version,name`, - }, - { - name: "bad output fields id", + name: "bad output fields", jsonInput: `{"output_fields":true}`, jsonErr: `perms.(Grant).unmarshalJSON: unable to interpret "output_fields" as array: parameter violation: error #100`, textInput: `output_fields=id=version,name`, textErr: `perms.(Grant).unmarshalText: segment "output_fields=id=version,name" not formatted correctly, wrong number of equal signs: parameter violation: error #100`, }, - { - name: "bad output fields ids", - jsonInput: `{"output_fields":true}`, - jsonErr: `perms.(Grant).unmarshalJSON: unable to interpret "output_fields" as array: parameter violation: error #100`, - textInput: `output_fields=ids=version,name`, - textErr: `perms.(Grant).unmarshalText: segment "output_fields=ids=version,name" not formatted correctly, wrong number of equal signs: parameter violation: error #100`, - }, { name: "good actions", expected: Grant{ @@ -450,7 +332,7 @@ func Test_Unmarshaling(t *testing.T) { require := require.New(t) var g Grant if test.jsonInput != "" { - err := g.unmarshalJSON(ctx, []byte(test.jsonInput)) + err := g.unmarshalJSON([]byte(test.jsonInput)) if test.jsonErr != "" { require.Error(err) assert.Equal(test.jsonErr, err.Error()) @@ -461,7 +343,7 @@ func Test_Unmarshaling(t *testing.T) { } g = Grant{} if test.textInput != "" { - err := g.unmarshalText(ctx, test.textInput) + err := g.unmarshalText(test.textInput) if test.textErr != "" { require.Error(err) assert.Equal(test.textErr, err.Error()) @@ -477,8 +359,6 @@ func 
Test_Unmarshaling(t *testing.T) { func Test_Parse(t *testing.T) { t.Parallel() - ctx := context.Background() - type input struct { name string input string @@ -506,38 +386,28 @@ func Test_Parse(t *testing.T) { }, { name: "bad type", - input: "ids=s_foobar;type=barfoo;actions=read", + input: "id=foobar;type=barfoo;actions=read", err: `perms.Parse: unable to parse grant string: perms.(Grant).unmarshalText: unknown type specifier "barfoo": parameter violation: error #100`, }, { name: "bad actions", - input: "ids=hcst_foobar;type=host-catalog;actions=createread", + input: "id=foobar;type=host-catalog;actions=createread", err: `perms.Parse: perms.(Grant).parseAndValidateActions: unknown action "createread": parameter violation: error #100`, }, { name: "bad id type", - input: "id=foobar;actions=read", - err: `perms.Parse: parsed grant string "id=foobar;actions=read" contains an id "foobar" of an unknown resource type: parameter violation: error #100`, - }, - { - name: "bad ids type first position", - input: "ids=foobar,hcst_foobar;actions=read", - err: `perms.Parse: input grant string "ids=foobar,hcst_foobar;actions=read" contains ids of differently-typed resources: parameter violation: error #100`, + input: "id=foobar;actions=create", + err: `perms.Parse: parsed grant string "id=foobar;actions=create" contains an id "foobar" of an unknown resource type: parameter violation: error #100`, }, { - name: "bad ids type second position", - input: "ids=hcst_foobar,foobar;actions=read", - err: `perms.Parse: input grant string "ids=hcst_foobar,foobar;actions=read" contains ids of differently-typed resources: parameter violation: error #100`, + name: "bad create action for id", + input: "id=u_foobar;actions=create", + err: `perms.Parse: parsed grant string "id=u_foobar;actions=create" contains create or list action in a format that does not allow these: parameter violation: error #100`, }, { - name: "bad create action for ids", - input: "ids=u_foobar;actions=create", - err: `perms.Parse: parsed grant string "ids=u_foobar;actions=create" contains create or list action in a format that does not allow these: parameter violation: error #100`, - }, - { - name: "bad create action for ids with other perms", - input: "ids=u_foobar;actions=read,create", - err: `perms.Parse: parsed grant string "ids=u_foobar;actions=create,read" contains create or list action in a format that does not allow these: parameter violation: error #100`, + name: "bad create action for id with other perms", + input: "id=u_foobar;actions=read,create", + err: `perms.Parse: parsed grant string "id=u_foobar;actions=create,read" contains create or list action in a format that does not allow these: parameter violation: error #100`, }, { name: "bad list action for id", @@ -545,7 +415,7 @@ func Test_Parse(t *testing.T) { err: `perms.Parse: parsed grant string "id=u_foobar;actions=list" contains create or list action in a format that does not allow these: parameter violation: error #100`, }, { - name: "bad list action for type with other perms", + name: "bad list action for id with other perms", input: "type=host-catalog;actions=list,read", err: `perms.Parse: parsed grant string "type=host-catalog;actions=list,read" contains non-create or non-list action in a format that only allows these: parameter violation: error #100`, }, @@ -554,38 +424,18 @@ func Test_Parse(t *testing.T) { input: "id=*;actions=read", err: `perms.Parse: parsed grant string "id=*;actions=read" contains wildcard id and no specified type: parameter violation: error #100`, }, - { 
- name: "wildcard ids and actions without collection", - input: "ids=*;actions=read", - err: `perms.Parse: parsed grant string "ids=*;actions=read" contains wildcard id and no specified type: parameter violation: error #100`, - }, { name: "wildcard id and actions with list", input: "id=*;actions=read,list", err: `perms.Parse: parsed grant string "id=*;actions=list,read" contains wildcard id and no specified type: parameter violation: error #100`, }, { - name: "wildcard ids and actions with list", - input: "ids=*;actions=read,list", - err: `perms.Parse: parsed grant string "ids=*;actions=list,read" contains wildcard id and no specified type: parameter violation: error #100`, - }, - { - name: "wildcard type with no ids", + name: "wildcard type with no id", input: "type=*;actions=read,list", err: `perms.Parse: parsed grant string "type=*;actions=list,read" contains wildcard type with no id value: parameter violation: error #100`, }, { - name: "mixed wildcard and non wildcard ids first position", - input: "ids=*,u_foobar;actions=read,list", - err: `perms.Parse: input grant string "ids=*,u_foobar;actions=read,list" contains both wildcard and non-wildcard values in "ids" field: parameter violation: error #100`, - }, - { - name: "mixed wildcard and non wildcard ids second position", - input: "ids=u_foobar,*;actions=read,list", - err: `perms.Parse: input grant string "ids=u_foobar,*;actions=read,list" contains both wildcard and non-wildcard values in "ids" field: parameter violation: error #100`, - }, - { - name: "empty ids and type", + name: "empty id and type", input: "actions=create", err: `perms.Parse: parsed grant string "actions=create" contains no id or type: parameter violation: error #100`, }, @@ -595,30 +445,10 @@ func Test_Parse(t *testing.T) { err: `perms.Parse: parsed grant string "id=ttcp_1234567890;type=*;actions=create" contains an id that does not support child types: parameter violation: error #100`, }, { - name: "wildcard type non child ids first position", - input: "ids=ttcp_1234567890,ttcp_1234567890;type=*;actions=create", - err: `perms.Parse: parsed grant string "ids=ttcp_1234567890,ttcp_1234567890;type=*;actions=create" contains an id that does not support child types: parameter violation: error #100`, - }, - { - name: "wildcard type non child ids second position", - input: "ids=ttcp_1234567890,ttcp_1234567890;type=*;actions=create", - err: `perms.Parse: parsed grant string "ids=ttcp_1234567890,ttcp_1234567890;type=*;actions=create" contains an id that does not support child types: parameter violation: error #100`, - }, - { - name: "specified resource type non child id", + name: "specified resource type non child", input: "id=hcst_1234567890;type=account;actions=read", err: `perms.Parse: parsed grant string "id=hcst_1234567890;type=account;actions=read" contains type account that is not a child type of the type (host-catalog) of the specified id: parameter violation: error #100`, }, - { - name: "specified resource type non child ids first position", - input: "ids=hcst_1234567890,hcst_1234567890;type=account;actions=read", - err: `perms.Parse: parsed grant string "ids=hcst_1234567890,hcst_1234567890;type=account;actions=read" contains type account that is not a child type of the type (host-catalog) of the specified id: parameter violation: error #100`, - }, - { - name: "specified resource type non child ids second position", - input: "ids=hcst_1234567890,hcst_1234567890;type=account;actions=read", - err: `perms.Parse: parsed grant string 
"ids=hcst_1234567890,hcst_1234567890;type=account;actions=read" contains type account that is not a child type of the type (host-catalog) of the specified id: parameter violation: error #100`, - }, { name: "no id with one bad action", input: "type=host-set;actions=read", @@ -688,22 +518,6 @@ func Test_Parse(t *testing.T) { }, }, }, - { - name: "wildcard ids and type and actions with list", - input: "ids=*;type=*;actions=read,list", - expected: Grant{ - scope: Scope{ - Id: "o_scope", - Type: scope.Org, - }, - ids: []string{"*"}, - typ: resource.All, - actions: map[action.Type]bool{ - action.Read: true, - action.List: true, - }, - }, - }, { name: "good json type", input: `{"type":"host-catalog","actions":["create"]}`, @@ -734,22 +548,7 @@ func Test_Parse(t *testing.T) { }, }, { - name: "good json ids", - input: `{"ids":["hcst_foobar", "hcst_foobaz"],"actions":["read"]}`, - expected: Grant{ - scope: Scope{ - Id: "o_scope", - Type: scope.Org, - }, - ids: []string{"hcst_foobar", "hcst_foobaz"}, - typ: resource.Unknown, - actions: map[action.Type]bool{ - action.Read: true, - }, - }, - }, - { - name: "good json output fields id", + name: "good json output fields", input: `{"id":"u_foobar","actions":["read"],"output_fields":["version","id","name"]}`, expected: Grant{ scope: Scope{ @@ -770,28 +569,6 @@ func Test_Parse(t *testing.T) { }, }, }, - { - name: "good json output fields ids", - input: `{"ids":["u_foobar"],"actions":["read"],"output_fields":["version","ids","name"]}`, - expected: Grant{ - scope: Scope{ - Id: "o_scope", - Type: scope.Org, - }, - ids: []string{"u_foobar"}, - typ: resource.Unknown, - actions: map[action.Type]bool{ - action.Read: true, - }, - OutputFields: &OutputFields{ - fields: map[string]bool{ - "version": true, - "ids": true, - "name": true, - }, - }, - }, - }, { name: "good json output fields no action", input: `{"id":"u_foobar","output_fields":["version","id","name"]}`, @@ -841,22 +618,7 @@ func Test_Parse(t *testing.T) { }, }, { - name: "good text ids", - input: `ids=hcst_foobar,hcst_foobaz;actions=read`, - expected: Grant{ - scope: Scope{ - Id: "o_scope", - Type: scope.Org, - }, - ids: []string{"hcst_foobar", "hcst_foobaz"}, - typ: resource.Unknown, - actions: map[action.Type]bool{ - action.Read: true, - }, - }, - }, - { - name: "good output fields id", + name: "good output fields", input: `id=u_foobar;actions=read;output_fields=version,id,name`, expected: Grant{ scope: Scope{ @@ -877,28 +639,6 @@ func Test_Parse(t *testing.T) { }, }, }, - { - name: "good output fields ids", - input: `ids=hcst_foobar,hcst_foobaz;actions=read;output_fields=version,ids,name`, - expected: Grant{ - scope: Scope{ - Id: "o_scope", - Type: scope.Org, - }, - ids: []string{"hcst_foobar", "hcst_foobaz"}, - typ: resource.Unknown, - actions: map[action.Type]bool{ - action.Read: true, - }, - OutputFields: &OutputFields{ - fields: map[string]bool{ - "version": true, - "ids": true, - "name": true, - }, - }, - }, - }, { name: "default project scope", input: `id=hcst_foobar;actions=read`, @@ -953,12 +693,6 @@ func Test_Parse(t *testing.T) { userId: "u_abcd1234", err: `perms.Parse: unknown template "{{superman}}" in grant "id" value: parameter violation: error #100`, }, - { - name: "bad user ids template", - input: `ids={{superman}};actions=create,read`, - userId: "u_abcd1234", - err: `perms.Parse: unknown template "{{superman}}" in grant "ids" value: parameter violation: error #100`, - }, { name: "good user id template", input: `id={{ user.id}};actions=read,update`, @@ -981,12 +715,6 @@ func 
Test_Parse(t *testing.T) { accountId: fmt.Sprintf("%s_1234567890", globals.PasswordAccountPreviousPrefix), err: `perms.Parse: unknown template "{{superman}}" in grant "id" value: parameter violation: error #100`, }, - { - name: "bad old account ids template", - input: `ids={{superman}};actions=read`, - accountId: fmt.Sprintf("%s_1234567890", globals.PasswordAccountPreviousPrefix), - err: `perms.Parse: unknown template "{{superman}}" in grant "ids" value: parameter violation: error #100`, - }, { name: "bad new account id template", input: `id={{superman}};actions=read`, @@ -1025,30 +753,13 @@ func Test_Parse(t *testing.T) { }, }, }, - { - name: "good ids template", - input: `ids={{ user.id}},{{ account.id}};actions=read,update`, - userId: "u_abcd1234", - accountId: fmt.Sprintf("%s_1234567890", globals.PasswordAccountPrefix), - expected: Grant{ - scope: Scope{ - Id: "o_scope", - Type: scope.Org, - }, - ids: []string{"u_abcd1234", "acctpw_1234567890"}, - actions: map[action.Type]bool{ - action.Update: true, - action.Read: true, - }, - }, - }, } - _, err := Parse(ctx, "", "") + _, err := Parse("", "") require.Error(t, err) assert.Equal(t, "perms.Parse: missing grant string: parameter violation: error #100", err.Error()) - _, err = Parse(ctx, "", "{}") + _, err = Parse("", "{}") require.Error(t, err) assert.Equal(t, "perms.Parse: missing scope id: parameter violation: error #100", err.Error()) @@ -1060,7 +771,7 @@ func Test_Parse(t *testing.T) { if test.scopeOverride != "" { scope = test.scopeOverride } - grant, err := Parse(ctx, scope, test.input, WithUserId(test.userId), WithAccountId(test.accountId)) + grant, err := Parse(scope, test.input, WithUserId(test.userId), WithAccountId(test.accountId)) if test.err != "" { require.Error(err) assert.Equal(test.err, err.Error()) @@ -1122,38 +833,31 @@ func TestHasActionOrSubaction(t *testing.T) { } func FuzzParse(f *testing.F) { - ctx := context.Background() - f.Add("type=host-catalog;actions=create") f.Add("type=*;actions=*") f.Add("id=*;type=*;actions=*") - f.Add("ids=*;type=*;actions=*") f.Add("id=*;type=*;actions=read,list") - f.Add("ids=*;type=*;actions=read,list") f.Add("id=foobar;actions=read;output_fields=version,id,name") - f.Add("ids=foobar,foobaz;actions=read;output_fields=version,id,name") f.Add("id={{account.id}};actions=update,read") - f.Add("ids={{account.id}},{{user.id}};actions=update,read") - f.Add(`{"id":"foobar","type":"host-catalog","actions":["create"]}`) - f.Add(`{"ids":["foobar"],"type":"host-catalog","actions":["create"]}`) + f.Add(`{id:"foobar","type":"host-catalog","actions":["create"]}`) f.Fuzz(func(t *testing.T, grant string) { - g, err := Parse(ctx, "global", grant, WithSkipFinalValidation(true)) + g, err := Parse("global", grant, WithSkipFinalValidation(true)) if err != nil { return } - g2, err := Parse(ctx, "global", g.CanonicalString(), WithSkipFinalValidation(true)) + g2, err := Parse("global", g.CanonicalString(), WithSkipFinalValidation(true)) if err != nil { t.Fatal("Failed to parse canonical string:", err) } if g.CanonicalString() != g2.CanonicalString() { t.Errorf("grant roundtrip failed, input %q, output %q", g.CanonicalString(), g2.CanonicalString()) } - jsonBytes, err := g.MarshalJSON(ctx) + jsonBytes, err := g.MarshalJSON() if err != nil { t.Error("Failed to marshal JSON:", err) } - g3, err := Parse(ctx, "global", string(jsonBytes), WithSkipFinalValidation(true)) + g3, err := Parse("global", string(jsonBytes), WithSkipFinalValidation(true)) if err != nil { t.Fatal("Failed to parse json string:", err) } diff 
--git a/internal/perms/output_fields_test.go b/internal/perms/output_fields_test.go index 9aa0745fa3a..24f0b204a41 100644 --- a/internal/perms/output_fields_test.go +++ b/internal/perms/output_fields_test.go @@ -4,7 +4,6 @@ package perms import ( - "context" "testing" "github.com/hashicorp/boundary/globals" @@ -136,8 +135,6 @@ func Test_OutputFields(t *testing.T) { func Test_ACLOutputFields(t *testing.T) { t.Parallel() - ctx := context.Background() - type input struct { name string grants []string @@ -279,7 +276,7 @@ func Test_ACLOutputFields(t *testing.T) { t.Run(test.name, func(t *testing.T) { var grants []Grant for _, g := range test.grants { - grant, err := Parse(ctx, "o_myorg", g) + grant, err := Parse("o_myorg", g) require.NoError(t, err) grants = append(grants, grant) } diff --git a/internal/plugin/ids.go b/internal/plugin/ids.go index b3e8fa0ab03..dce9587fd6f 100644 --- a/internal/plugin/ids.go +++ b/internal/plugin/ids.go @@ -4,8 +4,6 @@ package plugin import ( - "context" - "github.com/hashicorp/boundary/internal/db" "github.com/hashicorp/boundary/internal/errors" ) @@ -15,10 +13,10 @@ const ( PluginPrefix = "pl" ) -func newPluginId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, PluginPrefix) +func newPluginId() (string, error) { + id, err := db.NewPublicId(PluginPrefix) if err != nil { - return "", errors.Wrap(ctx, err, "plugin.newPluginId") + return "", errors.WrapDeprecated(err, "plugin.newPluginId") } return id, nil } diff --git a/internal/plugin/plugin_test.go b/internal/plugin/plugin_test.go index 438ff19df4f..00b65ff6855 100644 --- a/internal/plugin/plugin_test.go +++ b/internal/plugin/plugin_test.go @@ -16,7 +16,6 @@ import ( ) func TestPlugin_Create(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") tests := []struct { @@ -74,7 +73,7 @@ func TestPlugin_Create(t *testing.T) { require.NotNil(t, got) require.Emptyf(t, got.PublicId, "PublicId set") - id, err := newPluginId(ctx) + id, err := newPluginId() require.NoError(t, err) got.PublicId = id @@ -82,7 +81,7 @@ func TestPlugin_Create(t *testing.T) { assert.Equal(t, tt.want, got) w := db.New(conn) - err = w.Create(ctx, got) + err = w.Create(context.Background(), got) if tt.wantErr { assert.Error(t, err) } else { diff --git a/internal/plugin/repository_plugin.go b/internal/plugin/repository_plugin.go index ea2b1f59ceb..1072b215eb8 100644 --- a/internal/plugin/repository_plugin.go +++ b/internal/plugin/repository_plugin.go @@ -46,7 +46,7 @@ func (r *Repository) CreatePlugin(ctx context.Context, p *Plugin, opt ...Option) p.PublicId = opts.withPublicId if p.PublicId == "" { var err error - p.PublicId, err = newPluginId(ctx) + p.PublicId, err = newPluginId() if err != nil { return nil, errors.Wrap(ctx, err, op) } diff --git a/internal/plugin/repository_plugin_test.go b/internal/plugin/repository_plugin_test.go index 6e861f3c997..2dcd51baece 100644 --- a/internal/plugin/repository_plugin_test.go +++ b/internal/plugin/repository_plugin_test.go @@ -166,12 +166,11 @@ func assertPublicId(t *testing.T, prefix, actual string) { } func TestRepository_LookupPlugin(t *testing.T) { - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) plg := TestPlugin(t, conn, "test") - badId, err := newPluginId(ctx) + badId, err := newPluginId() assert.NoError(t, err) assert.NotNil(t, badId) @@ -204,11 +203,12 @@ func TestRepository_LookupPlugin(t *testing.T) { t.Run(tt.name, func(t *testing.T) { assert := assert.New(t) kms := 
kms.TestKms(t, conn, wrapper) + ctx := context.Background() repo, err := NewRepository(ctx, rw, rw, kms) assert.NoError(err) assert.NotNil(repo) - got, err := repo.LookupPlugin(ctx, tt.id) + got, err := repo.LookupPlugin(context.Background(), tt.id) if tt.wantErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantErr), err), "want err: %q got: %q", tt.wantErr, err) return diff --git a/internal/plugin/testing.go b/internal/plugin/testing.go index 7fd1b25278d..c00b21bda76 100644 --- a/internal/plugin/testing.go +++ b/internal/plugin/testing.go @@ -90,9 +90,8 @@ func WithStorageFlag(flag bool) TestOption { func TestPlugin(t testing.TB, conn *db.DB, name string, opt ...TestOption) *Plugin { opts := getTestOpts(opt...) t.Helper() - ctx := context.Background() p := NewPlugin(WithName(name)) - id, err := newPluginId(ctx) + id, err := newPluginId() require.NoError(t, err) p.PublicId = id @@ -100,6 +99,7 @@ func TestPlugin(t testing.TB, conn *db.DB, name string, opt ...TestOption) *Plug require.NoError(t, w.Create(context.Background(), p)) wrapper := db.TestWrapper(t) kmsCache := kms.TestKms(t, conn, wrapper) + ctx := context.Background() repo, err := NewRepository(ctx, w, w, kmsCache) require.NoError(t, err) diff --git a/internal/proto/controller/api/resources/roles/v1/role.proto b/internal/proto/controller/api/resources/roles/v1/role.proto index d5318552e8a..102ff372458 100644 --- a/internal/proto/controller/api/resources/roles/v1/role.proto +++ b/internal/proto/controller/api/resources/roles/v1/role.proto @@ -25,11 +25,7 @@ message Principal { message GrantJson { // Output only. The ID, if set. - // Deprecated: use "ids" instead. - string id = 1 [deprecated = true]; // @gotags: `class:"public"` - - // Output only. The IDs, if set. - repeated string ids = 4; // @gotags: `class:"public"` + string id = 1; // @gotags: `class:"public"` // Output only. The type, if set. string type = 2; // @gotags: `class:"public"` diff --git a/internal/proto/controller/api/resources/sessionrecordings/v1/session_recording.proto b/internal/proto/controller/api/resources/sessionrecordings/v1/session_recording.proto index 27ac940e87c..06131e92047 100644 --- a/internal/proto/controller/api/resources/sessionrecordings/v1/session_recording.proto +++ b/internal/proto/controller/api/resources/sessionrecordings/v1/session_recording.proto @@ -218,162 +218,6 @@ message SshTargetAttributes { uint32 default_client_port = 2 [json_name = "default_client_port"]; // @gotags: class:"public" } -// CredentialStore contains all fields related to a Credential Store resource -message CredentialStore { - // The ID of the Credential Store. - string id = 1; // @gotags: class:"public" - - // The ID of the Scope of which this Credential Store is a part. - string scope_id = 2 [json_name = "scope_id"]; // @gotags: class:"public" - - // The name for identification purposes if set. - string name = 3; // @gotags: class:"public" - - // The description for identification purposes if set. - string description = 4; // @gotags: class:"public" - - // The Credential Store type. - string type = 5; // @gotags: class:"public" - - oneof attrs { - // The attributes that are applicable for the specific Credential Store type. - google.protobuf.Struct attributes = 6; - } -} - -// The attributes of a vault typed Credential Store. -message VaultCredentialStoreAttributes { - // The complete url address of vault. 
- string address = 1; // @gotags: class:"public" - - // The namespace of vault used by this store - string namespace = 2; // @gotags: class:"public" - - // The value to use as the SNI host when connecting to vault via TLS. - string tls_server_name = 3 [json_name = "tls_server_name"]; // @gotags: class:"public" - - // Indicates if verification of the TLS certificate is disabled. - bool tls_skip_verify = 4 [json_name = "tls_skip_verify"]; // @gotags: class:"public" - - // Filters to the worker(s) who can handle Vault requests for this cred store if set. - string worker_filter = 5 [json_name = "worker_filter"]; // @gotags: class:"public" -} - -// Credential contains fields related to an Credential resource -message Credential { - // The ID of the Credential. - string id = 1; // @gotags: class:"public" - - // The Credential Store of which this Credential is a part. - CredentialStore credential_store = 2 [json_name = "credential_store"]; // @gotags: class:"public" - - // The name of the credential. - string name = 3; // @gotags: class:"public" - - // Optional user-set description. - string description = 4; // @gotags: class:"public" - - // The purposes for which this Credential was attached to the sesssion. - repeated string purposes = 5; - - // The Credential type. - string type = 6; // @gotags: class:"public" - - oneof attrs { - // The attributes that are applicable for the specific Credential type. - google.protobuf.Struct attributes = 7; - } -} - -// The attributes of a UsernamePassword Credential. -message UsernamePasswordCredentialAttributes { - // The username associated with the credential. - string username = 1; // @gotags: class:"public" - - // The hmac value of the password. - string password_hmac = 2; // @gotags: class:"public" -} - -// The attributes of a SshPrivateKey Credential. -message SshPrivateKeyCredentialAttributes { - // The username associated with the credential. - string username = 1; // @gotags: class:"public" - - // The hmac value of the SSH private key. - string private_key_hmac = 2; // @gotags: class:"public" - - // The hmac value of the SSH private key passphrase. - string private_key_passphrase_hmac = 3; // @gotags: class:"public" -} - -// The attributes of a JSON Credential. -message JsonCredentialAttributes { - // The hmac value of the object. - string object_hmac = 1; // @gotags: class:"public" -} - -// CredentialLibrary contains all fields related to an Credential Library resource -message CredentialLibrary { - // The ID of the Credential Library. - string id = 1; // @gotags: class:"public" - - // The credential store of which this library is a part. - CredentialStore credential_store = 2 [json_name = "credential_store"]; // @gotags: class:"public" - - // Optional name of this Credential Library. - string name = 3; // @gotags: class:"public" - - // Optional user-set description of this Credential Library. - string description = 4; // @gotags: class:"public" - - // The purposes for which this CredentialLibrary was attached to the sesssion. - repeated string purposes = 5; - - // The Credential Library type. - string type = 6; // @gotags: class:"public" - - oneof attrs { - // The attributes that are applicable for the specific Credential Library type. - google.protobuf.Struct attributes = 7; - } -} - -// The attributes of a vault typed Credential Library. -message VaultCredentialLibraryAttributes { - // The path in Vault to request credentials from. - string path = 1; // @gotags: class:"public" - - // The HTTP method the library uses to communicate with Vault. 
- string http_method = 2; // @gotags: class:"public" - - // The body of the HTTP request the library sends to vault. - string http_request_body = 3; // @gotags: `class:"secret"` -} - -// The attributes of a vault SSH Certificate Credential Library. -message VaultSSHCertificateCredentialLibraryAttributes { - // The path in Vault to request credentials from. - string path = 1; // @gotags: class:"public" - - // The username used when making an SSH connection. - string username = 2; // @gotags: `class:"sensitive"` - - // The key type to use when generating an SSH private key. - string key_type = 3; // @gotags: class:"public" - - // The number of bits to use to generate an SSH private key. - uint32 key_bits = 4; // @gotags: class:"public" - - // The requested time to live for the certificate. - string ttl = 5; // @gotags: class:"public" - - // The critical options that the certificate should be signed for. - map critical_options = 6; // @gotags: class:"public" - - // The extensions that the certificate should be signed for. - map extensions = 7; // @gotags: class:"public" -} - // ValuesAtTime contain information about other Boundary resources as they // were at a certain time through the lifetime of the Session Recording. message ValuesAtTime { @@ -385,12 +229,6 @@ message ValuesAtTime { // Information about the Host chosen for the session. Host host = 3; // @gotags: class:"public" - - // Information about the Credentials used for this session. - repeated Credential credentials = 4; - - // Information about the Credential Libraries used for this session. - repeated CredentialLibrary credential_libraries = 5 [json_name = "credential_libraries"]; } // SessionRecording contains information about the recording of a Session. diff --git a/internal/proto/plugin/v1/host_plugin_service.proto b/internal/proto/plugin/v1/host_plugin_service.proto index f668eb63c98..da776ebffda 100644 --- a/internal/proto/plugin/v1/host_plugin_service.proto +++ b/internal/proto/plugin/v1/host_plugin_service.proto @@ -17,20 +17,6 @@ service HostPluginService { // allows those values to be normalized prior to creating or updating those // values in the host catalog data. // - // NormalizeCatalogData is useful for converting the values of attributes from - // a certain format/type to an expected value format/type. This is useful - // during migration of values. - // - // NormalizeCatalogData is called before the values of attributes are persisted. - // All normalized values will be persisted in Boundary and returned - // to all clients. - // - // NormalizeCatalogData could affect other clients. For example, on Terraform, - // if data is passed to Boundary and then normalized into a new data - // structure, it could cause diffs in Terraform for unchanged values. - // This is because, the data structure in Terraform's state will now be - // different from the normalized data structure returned from Boundary. - // // NormalizeCatalogData is called before: // * OnCreateCatalog // * OnUpdateCatalog @@ -52,20 +38,6 @@ service HostPluginService { // allows those values to be normalized prior to creating or updating those // values in the host set data. // - // NormalizeSetData is useful for converting the values of attributes from - // a certain format/type to an expected value format/type. This is useful - // during migration of values. - // - // NormalizeSetData is called before the values of attributes are persisted. - // All normalized values will be persisted in Boundary and returned - // to all clients. 
- // - // NormalizeSetData could affect other clients. For example, on Terraform, - // if data is passed to Boundary and then normalized into a new data - // structure, it could cause diffs in Terraform for unchanged values. - // This is because, the data structure in Terraform's state will now be - // different from the normalized data structure returned from Boundary. - // // NormalizeSetData is called before: // * OnCreateSet // * OnUpdateSet diff --git a/internal/scheduler/additional_verification_test.go b/internal/scheduler/additional_verification_test.go index 09ec21cb015..fb1da0b797d 100644 --- a/internal/scheduler/additional_verification_test.go +++ b/internal/scheduler/additional_verification_test.go @@ -154,7 +154,6 @@ func TestSchedulerCancelCtx(t *testing.T) { func TestSchedulerInterruptedCancelCtx(t *testing.T) { // do not use t.Parallel() since it relies on the sys eventer assert, require := assert.New(t), require.New(t) - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) rw := db.New(conn) @@ -172,12 +171,12 @@ func TestSchedulerInterruptedCancelCtx(t *testing.T) { fn, job1Ready, job1Done := testJobFn() tj1 := testJob{name: "name1", description: "desc", fn: fn, nextRunIn: time.Hour} - err = sched.RegisterJob(ctx, tj1) + err = sched.RegisterJob(context.Background(), tj1) require.NoError(err) fn, job2Ready, job2Done := testJobFn() tj2 := testJob{name: "name2", description: "desc", fn: fn, nextRunIn: time.Hour} - err = sched.RegisterJob(ctx, tj2) + err = sched.RegisterJob(context.Background(), tj2) require.NoError(err) baseCtx, baseCnl := context.WithCancel(context.Background()) @@ -217,12 +216,12 @@ func TestSchedulerInterruptedCancelCtx(t *testing.T) { } // Interrupt job 1 run to cause monitor loop to trigger cancel - repo, err := job.NewRepository(ctx, rw, rw, kmsCache) + repo, err := job.NewRepository(rw, rw, kmsCache) require.NoError(err) - run, err := repo.LookupRun(ctx, run1Id) + run, err := repo.LookupRun(context.Background(), run1Id) require.NoError(err) run.Status = string(job.Interrupted) - rowsUpdated, err := rw.Update(ctx, run, []string{"Status"}, nil) + rowsUpdated, err := rw.Update(context.Background(), run, []string{"Status"}, nil) require.NoError(err) assert.Equal(1, rowsUpdated) @@ -240,12 +239,12 @@ func TestSchedulerInterruptedCancelCtx(t *testing.T) { } // Interrupt job 2 run to cause monitor loop to trigger cancel - repo, err = job.NewRepository(ctx, rw, rw, kmsCache) + repo, err = job.NewRepository(rw, rw, kmsCache) require.NoError(err) - run, err = repo.LookupRun(ctx, run2Id) + run, err = repo.LookupRun(context.Background(), run2Id) require.NoError(err) run.Status = string(job.Interrupted) - rowsUpdated, err = rw.Update(ctx, run, []string{"Status"}, nil) + rowsUpdated, err = rw.Update(context.Background(), run, []string{"Status"}, nil) require.NoError(err) assert.Equal(1, rowsUpdated) @@ -255,7 +254,6 @@ func TestSchedulerInterruptedCancelCtx(t *testing.T) { func TestSchedulerJobProgress(t *testing.T) { // do not use t.Parallel() since it relies on the sys eventer - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -322,9 +320,9 @@ func TestSchedulerJobProgress(t *testing.T) { // Wait for scheduler to query for job status before verifying previous results <-statusRequest - repo, err := job.NewRepository(ctx, rw, rw, kmsCache) + repo, err := job.NewRepository(rw, rw, kmsCache) require.NoError(err) - run, err := 
repo.LookupRun(ctx, runId) + run, err := repo.LookupRun(context.Background(), runId) require.NoError(err) assert.Equal(string(job.Running), run.Status) assert.Equal(uint32(10), run.TotalCount) @@ -364,7 +362,6 @@ func TestSchedulerJobProgress(t *testing.T) { func TestSchedulerMonitorLoop(t *testing.T) { // do not use t.Parallel() since it relies on the sys eventer - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -416,9 +413,9 @@ func TestSchedulerMonitorLoop(t *testing.T) { // Wait for scheduler to interrupt job <-jobDone - repo, err := job.NewRepository(ctx, rw, rw, kmsCache) + repo, err := job.NewRepository(rw, rw, kmsCache) require.NoError(err) - run, err := repo.LookupRun(ctx, runId) + run, err := repo.LookupRun(context.Background(), runId) require.NoError(err) assert.Equal(string(job.Interrupted), run.Status) baseCnl() @@ -430,7 +427,6 @@ func TestSchedulerMonitorLoop(t *testing.T) { func TestSchedulerFinalStatusUpdate(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -485,7 +481,7 @@ func TestSchedulerFinalStatusUpdate(t *testing.T) { // Report status jobStatus <- JobStatus{Total: 10, Completed: 10} - repo, err := job.NewRepository(ctx, rw, rw, kmsCache) + repo, err := job.NewRepository(rw, rw, kmsCache) require.NoError(err) run := waitForRunStatus(t, repo, runId, string(job.Failed)) @@ -506,7 +502,7 @@ func TestSchedulerFinalStatusUpdate(t *testing.T) { // Report status jobStatus <- JobStatus{Total: 20, Completed: 20} - repo, err = job.NewRepository(ctx, rw, rw, kmsCache) + repo, err = job.NewRepository(rw, rw, kmsCache) require.NoError(err) run = waitForRunStatus(t, repo, runId, string(job.Completed)) @@ -521,7 +517,6 @@ func TestSchedulerFinalStatusUpdate(t *testing.T) { func TestSchedulerRunNow(t *testing.T) { // do not use t.Parallel() since it relies on the sys eventer - ctx := context.Background() require := require.New(t) conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) @@ -555,7 +550,7 @@ func TestSchedulerRunNow(t *testing.T) { err = sched.RegisterJob(context.Background(), tj) require.NoError(err) - baseCtx, baseCnl := context.WithCancel(ctx) + baseCtx, baseCnl := context.WithCancel(context.Background()) defer baseCnl() var wg sync.WaitGroup err = sched.Start(baseCtx, &wg) @@ -572,7 +567,7 @@ func TestSchedulerRunNow(t *testing.T) { // Complete job jobCh <- struct{}{} - repo, err := job.NewRepository(ctx, rw, rw, kmsCache) + repo, err := job.NewRepository(rw, rw, kmsCache) require.NoError(err) waitForRunStatus(t, repo, runId, string(job.Completed)) diff --git a/internal/scheduler/job.go b/internal/scheduler/job.go index 2e755544070..88d998db748 100644 --- a/internal/scheduler/job.go +++ b/internal/scheduler/job.go @@ -42,16 +42,16 @@ type JobStatus struct { Completed, Total int } -func validateJob(ctx context.Context, j Job) error { +func validateJob(j Job) error { const op = "scheduler.validateJob" if j == nil { - return errors.New(ctx, errors.InvalidParameter, op, "missing job") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing job") } if j.Name() == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing name") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing name") } if j.Description() == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing description") + return 
errors.NewDeprecated(errors.InvalidParameter, op, "missing description") } return nil } diff --git a/internal/scheduler/job/additional_verification_test.go b/internal/scheduler/job/additional_verification_test.go index b11e5d08975..703ee3c8224 100644 --- a/internal/scheduler/job/additional_verification_test.go +++ b/internal/scheduler/job/additional_verification_test.go @@ -18,7 +18,6 @@ import ( func TestJobWorkflow(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") @@ -29,50 +28,50 @@ func TestJobWorkflow(t *testing.T) { server := testController(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) - job, err := repo.UpsertJob(ctx, "job1", "description") + job, err := repo.UpsertJob(context.Background(), "job1", "description") require.NoError(err) require.NotNil(job) assert.Equal(defaultPluginId, job.PluginId) - runs, err := repo.RunJobs(ctx, server.PrivateId) + runs, err := repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) assert.Len(runs, 1) run := runs[0] assert.NotEmpty(run.PrivateId) assert.Equal(job.Name, run.JobName) - run, err = repo.UpdateProgress(ctx, run.PrivateId, 100, 110) + run, err = repo.UpdateProgress(context.Background(), run.PrivateId, 100, 110) require.NoError(err) assert.Equal(uint32(100), run.CompletedCount) assert.Equal(uint32(110), run.TotalCount) // The only available job is already running, a request for work should return nil - newRuns, err := repo.RunJobs(ctx, server.PrivateId) + newRuns, err := repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) assert.Nil(newRuns) - run, err = repo.CompleteRun(ctx, run.PrivateId, time.Hour, 0, 0) + run, err = repo.CompleteRun(context.Background(), run.PrivateId, time.Hour, 0, 0) require.NoError(err) assert.Equal(Completed.string(), run.Status) - job, err = repo.LookupJob(ctx, job.Name) + job, err = repo.LookupJob(context.Background(), job.Name) require.NoError(err) assert.NotNil(job) // The only available job has a next run in the future, a request for work should return nil - newRuns, err = repo.RunJobs(ctx, server.PrivateId) + newRuns, err = repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) assert.Nil(newRuns) // Update job next run to time in past - job, err = repo.UpdateJobNextRunInAtLeast(ctx, job.Name, 0) + job, err = repo.UpdateJobNextRunInAtLeast(context.Background(), job.Name, 0) require.NoError(err) // Now that next scheduled time is in past, a request for work should return a Run - newRuns, err = repo.RunJobs(ctx, server.PrivateId) + newRuns, err = repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) require.Len(newRuns, 1) newRun := newRuns[0] @@ -81,16 +80,16 @@ func TestJobWorkflow(t *testing.T) { assert.NotEqual(run.PrivateId, newRun.PrivateId) // The only available job is already running, a request for work should return nil - newRuns, err = repo.RunJobs(ctx, server.PrivateId) + newRuns, err = repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) assert.Nil(newRuns) - newRun, err = repo.FailRun(ctx, newRun.PrivateId, 0, 0) + newRun, err = repo.FailRun(context.Background(), newRun.PrivateId, 0, 0) require.NoError(err) assert.Equal(Failed.string(), newRun.Status) // Run failed so the job should be available for work immediately - newRuns, err = repo.RunJobs(ctx, server.PrivateId) + newRuns, err = repo.RunJobs(context.Background(), 
server.PrivateId) require.NoError(err) assert.Len(newRuns, 1) } diff --git a/internal/scheduler/job/repository.go b/internal/scheduler/job/repository.go index 91fa35ecf5a..268c4f6a181 100644 --- a/internal/scheduler/job/repository.go +++ b/internal/scheduler/job/repository.go @@ -4,8 +4,6 @@ package job import ( - "context" - "github.com/hashicorp/boundary/internal/db" "github.com/hashicorp/boundary/internal/errors" "github.com/hashicorp/boundary/internal/kms" @@ -27,15 +25,15 @@ type Repository struct { // only be used for one transaction and it is not safe for concurrent go // routines to access it. WithLimit option is used as a repo wide default // limit applied to all ListX methods. -func NewRepository(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { +func NewRepository(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { const op = "job.NewRepository" switch { case r == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db reader") case w == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db writer") case kms == nil: - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing kms") } opts := getOpts(opt...) diff --git a/internal/scheduler/job/repository_job_test.go b/internal/scheduler/job/repository_job_test.go index 39afa056aed..fb7316a5ecb 100644 --- a/internal/scheduler/job/repository_job_test.go +++ b/internal/scheduler/job/repository_job_test.go @@ -23,7 +23,6 @@ import ( func TestRepository_UpsertJob(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -78,10 +77,10 @@ func TestRepository_UpsertJob(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.UpsertJob(ctx, tt.in.name, tt.in.description) + got, err := repo.UpsertJob(context.Background(), tt.in.name, tt.in.description) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) @@ -100,17 +99,17 @@ func TestRepository_UpsertJob(t *testing.T) { t.Run("re-register-same-names", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) - got, err := repo.UpsertJob(ctx, "test-dup-name", "description") + got, err := repo.UpsertJob(context.Background(), "test-dup-name", "description") require.NoError(err) require.NotNil(got) assert.Equal("test-dup-name", got.Name) assert.Equal("description", got.Description) - got2, err := repo.UpsertJob(ctx, "test-dup-name", "updated description") + got2, err := repo.UpsertJob(context.Background(), "test-dup-name", "updated description") require.NoError(err) require.NotNil(got2) assert.Equal("test-dup-name", got2.Name) @@ -120,7 +119,6 @@ func TestRepository_UpsertJob(t *testing.T) { func TestRepository_LookupJob(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := 
db.New(conn) wrapper := db.TestWrapper(t) @@ -158,10 +156,10 @@ func TestRepository_LookupJob(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.LookupJob(ctx, tt.in) + got, err := repo.LookupJob(context.Background(), tt.in) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) @@ -184,7 +182,6 @@ func TestRepository_LookupJob(t *testing.T) { func TestRepository_deleteJob(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -223,10 +220,10 @@ func TestRepository_deleteJob(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.deleteJob(ctx, tt.in) + got, err := repo.deleteJob(context.Background(), tt.in) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) @@ -242,7 +239,6 @@ func TestRepository_deleteJob(t *testing.T) { func TestRepository_ListJobs(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -288,10 +284,10 @@ func TestRepository_ListJobs(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.ListJobs(ctx, tt.opts...) + got, err := repo.ListJobs(context.Background(), tt.opts...) require.NoError(err) opts := []cmp.Option{ cmpopts.SortSlices(func(x, y *Job) bool { return x.Name < y.Name }), @@ -304,7 +300,6 @@ func TestRepository_ListJobs(t *testing.T) { func TestRepository_ListJobs_Limits(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -365,10 +360,10 @@ func TestRepository_ListJobs_Limits(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms, tt.repoOpts...) + repo, err := NewRepository(rw, rw, kms, tt.repoOpts...) assert.NoError(err) require.NotNil(repo) - got, err := repo.ListJobs(ctx, tt.listOpts...) + got, err := repo.ListJobs(context.Background(), tt.listOpts...) 
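
As context for the ListJobs changes above, a minimal sketch of the repository usage these tests rely on, using the ctx-less constructor from this change. `WithLimit` is the repo-wide default list limit named in the `NewRepository` comment; everything else shown (fields, return shapes) comes from the assertions in these tests, and the helper name `listJobs` is illustrative only.

```go
package jobsketch

import (
	"context"
	"fmt"

	"github.com/hashicorp/boundary/internal/db"
	"github.com/hashicorp/boundary/internal/kms"
	"github.com/hashicorp/boundary/internal/scheduler/job"
)

// listJobs builds a job repository with a default list limit and prints the
// registered jobs. A sketch only; error wrapping and options beyond WithLimit
// are omitted.
func listJobs(ctx context.Context, r db.Reader, w db.Writer, k *kms.Kms) error {
	// WithLimit sets the repo-wide default applied to ListX methods, per the
	// NewRepository doc comment.
	repo, err := job.NewRepository(r, w, k, job.WithLimit(50))
	if err != nil {
		return err
	}
	jobs, err := repo.ListJobs(ctx)
	if err != nil {
		return err
	}
	for _, j := range jobs {
		fmt.Println(j.Name, j.Description)
	}
	return nil
}
```
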
require.NoError(err) assert.Len(got, tt.wantLen) }) @@ -377,7 +372,6 @@ func TestRepository_ListJobs_Limits(t *testing.T) { func TestRepository_UpdateJobNextRunInAtLeast(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -386,7 +380,7 @@ func TestRepository_UpdateJobNextRunInAtLeast(t *testing.T) { t.Run("valid", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(err) require.NotNil(repo) @@ -407,7 +401,7 @@ func TestRepository_UpdateJobNextRunInAtLeast(t *testing.T) { t.Run("next-run-already-sooner", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(err) require.NotNil(repo) job, err := repo.UpsertJob(context.Background(), "next-run-already-sooner", "description", WithNextRunIn(time.Hour)) @@ -436,7 +430,7 @@ func TestRepository_UpdateJobNextRunInAtLeast(t *testing.T) { t.Run("no-name", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(err) require.NotNil(repo) @@ -449,7 +443,7 @@ func TestRepository_UpdateJobNextRunInAtLeast(t *testing.T) { t.Run("job-not-found", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(err) require.NotNil(repo) diff --git a/internal/scheduler/job/repository_run_test.go b/internal/scheduler/job/repository_run_test.go index bd67d6bb388..c0e4c505637 100644 --- a/internal/scheduler/job/repository_run_test.go +++ b/internal/scheduler/job/repository_run_test.go @@ -21,7 +21,6 @@ import ( func TestRepository_RunJobs(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -96,11 +95,11 @@ func TestRepository_RunJobs(t *testing.T) { testJob(t, conn, tt.job.Name, tt.job.Description, wrapper) } - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.RunJobs(ctx, tt.ControllerId) + got, err := repo.RunJobs(context.Background(), tt.ControllerId) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) @@ -122,7 +121,6 @@ func TestRepository_RunJobs(t *testing.T) { func TestRepository_RunJobs_Limits(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -167,7 +165,7 @@ func TestRepository_RunJobs_Limits(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) @@ -175,12 +173,12 @@ func TestRepository_RunJobs_Limits(t *testing.T) { testJob(t, conn, fmt.Sprintf("%v-%d", tt.name, i), "description", wrapper) } - got, err := repo.RunJobs(ctx, server.PrivateId, tt.opts...) + got, err := repo.RunJobs(context.Background(), server.PrivateId, tt.opts...) 
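
The workflow exercised by TestJobWorkflow above reduces to the sketch below: register a job, claim a run on behalf of a controller, report progress, then complete the run and schedule the next one. The calls and signatures mirror the test code in this diff; `controllerId` and the progress counts are placeholders.

```go
package jobsketch

import (
	"context"
	"time"

	"github.com/hashicorp/boundary/internal/db"
	"github.com/hashicorp/boundary/internal/kms"
	"github.com/hashicorp/boundary/internal/scheduler/job"
)

// runOnce walks one job run through its lifecycle using the repository calls
// shown in these tests.
func runOnce(ctx context.Context, r db.Reader, w db.Writer, k *kms.Kms, controllerId string) error {
	repo, err := job.NewRepository(r, w, k)
	if err != nil {
		return err
	}
	// Upsert registers the job (or updates its description) before any run exists.
	if _, err := repo.UpsertJob(ctx, "job1", "description"); err != nil {
		return err
	}
	// RunJobs hands out eligible runs for this controller; nil means no work.
	runs, err := repo.RunJobs(ctx, controllerId)
	if err != nil || len(runs) == 0 {
		return err
	}
	run := runs[0]
	// Report progress, then complete the run and request the next run in an hour.
	if _, err := repo.UpdateProgress(ctx, run.PrivateId, 100, 110); err != nil {
		return err
	}
	_, err = repo.CompleteRun(ctx, run.PrivateId, time.Hour, 110, 110)
	return err
}
```
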
require.NoError(err) assert.Len(got, tt.wantLen) // Clean up jobs for next run - rows, err := rw.Query(ctx, "delete from job", nil) + rows, err := rw.Query(context.Background(), "delete from job", nil) require.NoError(err) _ = rows.Close() }) @@ -189,7 +187,6 @@ func TestRepository_RunJobs_Limits(t *testing.T) { func TestRepository_RunJobsOrder(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -199,7 +196,7 @@ func TestRepository_RunJobsOrder(t *testing.T) { server := testController(t, conn, wrapper) assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -208,7 +205,7 @@ func TestRepository_RunJobsOrder(t *testing.T) { firstJob := testJob(t, conn, "past", "description", wrapper, WithNextRunIn(-24*time.Hour)) middleJob := testJob(t, conn, "current", "description", wrapper, WithNextRunIn(-12*time.Hour)) - runs, err := repo.RunJobs(ctx, server.PrivateId) + runs, err := repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) require.Len(runs, 1) run := runs[0] @@ -216,10 +213,10 @@ func TestRepository_RunJobsOrder(t *testing.T) { assert.Equal(run.JobPluginId, firstJob.PluginId) // End first job with time between last and middle - _, err = repo.CompleteRun(ctx, run.PrivateId, -6*time.Hour, 0, 0) + _, err = repo.CompleteRun(context.Background(), run.PrivateId, -6*time.Hour, 0, 0) require.NoError(err) - runs, err = repo.RunJobs(ctx, server.PrivateId) + runs, err = repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) require.Len(runs, 1) run = runs[0] @@ -227,14 +224,14 @@ func TestRepository_RunJobsOrder(t *testing.T) { assert.Equal(run.JobPluginId, middleJob.PluginId) // firstJob should be up again, as it is scheduled before lastJob - runs, err = repo.RunJobs(ctx, server.PrivateId) + runs, err = repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) require.Len(runs, 1) run = runs[0] assert.Equal(run.JobName, firstJob.Name) assert.Equal(run.JobPluginId, firstJob.PluginId) - runs, err = repo.RunJobs(ctx, server.PrivateId) + runs, err = repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) require.Len(runs, 1) run = runs[0] @@ -242,14 +239,13 @@ func TestRepository_RunJobsOrder(t *testing.T) { assert.Equal(run.JobPluginId, lastJob.PluginId) // All jobs are running no work should be returned - runs, err = repo.RunJobs(ctx, server.PrivateId) + runs, err = repo.RunJobs(context.Background(), server.PrivateId) require.NoError(err) require.Len(runs, 0) } func TestRepository_UpdateProgress(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -410,19 +406,19 @@ func TestRepository_UpdateProgress(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) assert.NoError(err) require.NotNil(repo) var privateId string if tt.orig != nil { - err = rw.Create(ctx, tt.orig) + err = rw.Create(context.Background(), tt.orig) assert.NoError(err) assert.Empty(tt.orig.EndTime) privateId = tt.orig.PrivateId } - got, err := repo.UpdateProgress(ctx, privateId, tt.args.completed, tt.args.total) + got, err := repo.UpdateProgress(context.Background(), privateId, tt.args.completed, 
tt.args.total) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) @@ -430,7 +426,7 @@ func TestRepository_UpdateProgress(t *testing.T) { if tt.orig != nil { // Delete job run so it does not clash with future runs - _, err = repo.deleteRun(ctx, privateId) + _, err = repo.deleteRun(context.Background(), privateId) assert.NoError(err) } @@ -451,11 +447,11 @@ func TestRepository_UpdateProgress(t *testing.T) { t.Run("job-run-not-found", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(err) require.NotNil(repo) - got, err := repo.UpdateProgress(ctx, "fake-run-id", 0, 0) + got, err := repo.UpdateProgress(context.Background(), "fake-run-id", 0, 0) require.Error(err) require.Nil(got) assert.Truef(errors.Match(errors.T(errors.RecordNotFound), err), "Unexpected error %s", err) @@ -465,7 +461,6 @@ func TestRepository_UpdateProgress(t *testing.T) { func TestRepository_CompleteRun(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -565,19 +560,19 @@ func TestRepository_CompleteRun(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) assert.NoError(err) require.NotNil(repo) var privateId string if tt.orig != nil { - err = rw.Create(ctx, tt.orig) + err = rw.Create(context.Background(), tt.orig) require.NoError(err) assert.Empty(tt.orig.EndTime) privateId = tt.orig.PrivateId } - got, err := repo.CompleteRun(ctx, privateId, tt.nextRunIn, tt.args.completed, tt.args.total) + got, err := repo.CompleteRun(context.Background(), privateId, tt.nextRunIn, tt.args.completed, tt.args.total) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) @@ -585,7 +580,7 @@ func TestRepository_CompleteRun(t *testing.T) { if tt.orig != nil { // Delete job run so it does not clash with future runs - _, err = repo.deleteRun(ctx, privateId) + _, err = repo.deleteRun(context.Background(), privateId) assert.NoError(err) } @@ -598,7 +593,7 @@ func TestRepository_CompleteRun(t *testing.T) { assert.Equal(tt.args.completed, int(got.CompletedCount)) assert.Equal(tt.args.total, int(got.TotalCount)) - updatedJob, err := repo.LookupJob(ctx, tt.orig.JobName) + updatedJob, err := repo.LookupJob(context.Background(), tt.orig.JobName) assert.NoError(err) require.NotNil(updatedJob) @@ -610,18 +605,18 @@ func TestRepository_CompleteRun(t *testing.T) { assert.Equal(nextRunAt.Round(time.Minute), previousRunEnd.Add(tt.nextRunIn).Round(time.Minute)) // Delete job run so it does not clash with future runs - _, err = repo.deleteRun(ctx, privateId) + _, err = repo.deleteRun(context.Background(), privateId) assert.NoError(err) }) } t.Run("job-run-not-found", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(err) require.NotNil(repo) - got, err := repo.CompleteRun(ctx, "fake-run-id", time.Hour, 0, 0) + got, err := repo.CompleteRun(context.Background(), "fake-run-id", time.Hour, 0, 0) require.Error(err) require.Nil(got) assert.Truef(errors.Match(errors.T(errors.RecordNotFound), err), "Unexpected error %s", 
err) @@ -631,7 +626,6 @@ func TestRepository_CompleteRun(t *testing.T) { func TestRepository_FailRun(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -729,19 +723,19 @@ func TestRepository_FailRun(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) assert.NoError(err) require.NotNil(repo) var privateId string if tt.orig != nil { - err = rw.Create(ctx, tt.orig) + err = rw.Create(context.Background(), tt.orig) assert.NoError(err) assert.Empty(tt.orig.EndTime) privateId = tt.orig.PrivateId } - got, err := repo.FailRun(ctx, privateId, tt.args.completed, tt.args.total) + got, err := repo.FailRun(context.Background(), privateId, tt.args.completed, tt.args.total) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) @@ -749,7 +743,7 @@ func TestRepository_FailRun(t *testing.T) { if tt.orig != nil { // Delete job run so it does not clash with future runs - _, err = repo.deleteRun(ctx, privateId) + _, err = repo.deleteRun(context.Background(), privateId) assert.NoError(err) } @@ -770,11 +764,11 @@ func TestRepository_FailRun(t *testing.T) { t.Run("job-run-not-found", func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(err) require.NotNil(repo) - got, err := repo.FailRun(ctx, "fake-run-id", 0, 0) + got, err := repo.FailRun(context.Background(), "fake-run-id", 0, 0) require.Error(err) require.Nil(got) assert.Truef(errors.Match(errors.T(errors.RecordNotFound), err), "Unexpected error %s", err) @@ -784,7 +778,6 @@ func TestRepository_FailRun(t *testing.T) { func TestRepository_InterruptRuns(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -839,7 +832,7 @@ func TestRepository_InterruptRuns(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) assert.NoError(err) require.NotNil(repo) @@ -853,7 +846,7 @@ func TestRepository_InterruptRuns(t *testing.T) { _, err = testRunWithUpdateTime(conn, job4.PluginId, job4.Name, server.PrivateId, time.Now().Add(-7*time.Hour)) require.NoError(err) - runs, err := repo.InterruptRuns(ctx, tt.threshold) + runs, err := repo.InterruptRuns(context.Background(), tt.threshold) require.NoError(err) assert.Equal(len(runs), len(tt.expectedInterrupts)) for _, eJob := range tt.expectedInterrupts { @@ -868,7 +861,7 @@ func TestRepository_InterruptRuns(t *testing.T) { } // Interrupt all runs for next test - _, err = repo.InterruptRuns(ctx, 0) + _, err = repo.InterruptRuns(context.Background(), 0) assert.NoError(err) }) } @@ -876,7 +869,6 @@ func TestRepository_InterruptRuns(t *testing.T) { func TestRepository_InterruptServerRuns(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) @@ -1089,11 +1081,11 @@ func TestRepository_InterruptServerRuns(t *testing.T) { for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { - repo, err := NewRepository(ctx, rw, rw, 
kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(err) for _, r := range tt.runs { - runs, err := repo.RunJobs(ctx, r.ControllerId, r.opts...) + runs, err := repo.RunJobs(context.Background(), r.ControllerId, r.opts...) require.NoError(err) assert.Len(runs, len(r.expectedJobs)) sort.Slice(runs, func(i, j int) bool { return runs[i].JobName < runs[j].JobName }) @@ -1157,7 +1149,6 @@ func TestRepository_DuplicateJobRun(t *testing.T) { func TestRepository_LookupJobRun(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1199,10 +1190,10 @@ func TestRepository_LookupJobRun(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.LookupRun(ctx, tt.in) + got, err := repo.LookupRun(context.Background(), tt.in) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) @@ -1218,7 +1209,6 @@ func TestRepository_LookupJobRun(t *testing.T) { func TestRepository_deleteJobRun(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -1262,10 +1252,10 @@ func TestRepository_deleteJobRun(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) assert.NoError(err) require.NotNil(repo) - got, err := repo.deleteRun(ctx, tt.in) + got, err := repo.deleteRun(context.Background(), tt.in) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) diff --git a/internal/scheduler/job/repository_test.go b/internal/scheduler/job/repository_test.go index 539d14cdd4e..a2cb2cd8282 100644 --- a/internal/scheduler/job/repository_test.go +++ b/internal/scheduler/job/repository_test.go @@ -4,7 +4,6 @@ package job import ( - "context" "testing" "github.com/hashicorp/boundary/internal/db" @@ -120,7 +119,7 @@ func TestRepository_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewRepository(context.Background(), tt.args.r, tt.args.w, tt.args.kms, tt.opts...) + got, err := NewRepository(tt.args.r, tt.args.w, tt.args.kms, tt.opts...) if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) diff --git a/internal/scheduler/job/testing.go b/internal/scheduler/job/testing.go index 0590656bf26..1e6cc86f88a 100644 --- a/internal/scheduler/job/testing.go +++ b/internal/scheduler/job/testing.go @@ -21,15 +21,14 @@ import ( func testJob(t testing.TB, conn *db.DB, name, description string, wrapper wrapping.Wrapper, opt ...Option) *Job { t.Helper() - ctx := context.Background() require := require.New(t) rw := db.New(conn) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) - job, err := repo.UpsertJob(ctx, name, description, opt...) + job, err := repo.UpsertJob(context.Background(), name, description, opt...) 
require.NoError(err) require.NotNil(job) @@ -96,10 +95,9 @@ func testRunWithUpdateTime(conn *db.DB, pluginId, name, cId string, updateTime t func testController(t *testing.T, conn *db.DB, wrapper wrapping.Wrapper, opt ...testOption) *store.Controller { t.Helper() - ctx := context.Background() rw := db.New(conn) kms := kms.TestKms(t, conn, wrapper) - serversRepo, err := server.NewRepository(ctx, rw, rw, kms) + serversRepo, err := server.NewRepository(rw, rw, kms) require.NoError(t, err) opts := getTestOpts(t, opt...) @@ -115,7 +113,7 @@ func testController(t *testing.T, conn *db.DB, wrapper wrapping.Wrapper, opt ... PrivateId: privateId, Address: "127.0.0.1", } - _, err = serversRepo.UpsertController(ctx, controller) + _, err = serversRepo.UpsertController(context.Background(), controller) require.NoError(t, err) return controller } diff --git a/internal/scheduler/scheduler.go b/internal/scheduler/scheduler.go index 30db0cca2ab..d998b64e91b 100644 --- a/internal/scheduler/scheduler.go +++ b/internal/scheduler/scheduler.go @@ -47,13 +47,13 @@ type Scheduler struct { // // WithRunJobsLimit, WithRunJobsInterval, WithMonitorInterval and WithInterruptThreshold are // the only valid options. -func New(ctx context.Context, serverId string, jobRepoFn jobRepoFactory, opt ...Option) (*Scheduler, error) { +func New(serverId string, jobRepoFn jobRepoFactory, opt ...Option) (*Scheduler, error) { const op = "scheduler.New" if serverId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing server id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing server id") } if jobRepoFn == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing job repo function") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing job repo function") } opts := getOpts(opt...) @@ -77,7 +77,7 @@ func New(ctx context.Context, serverId string, jobRepoFn jobRepoFactory, opt ... // WithNextRunIn is the only valid options. func (s *Scheduler) RegisterJob(ctx context.Context, j Job, opt ...Option) error { const op = "scheduler.(Scheduler).RegisterJob" - if err := validateJob(ctx, j); err != nil { + if err := validateJob(j); err != nil { return errors.Wrap(ctx, err, op) } diff --git a/internal/scheduler/scheduler_test.go b/internal/scheduler/scheduler_test.go index f186c6e5f45..a125c0c2341 100644 --- a/internal/scheduler/scheduler_test.go +++ b/internal/scheduler/scheduler_test.go @@ -20,7 +20,6 @@ import ( func TestScheduler_New(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -28,7 +27,7 @@ func TestScheduler_New(t *testing.T) { iam.TestRepo(t, conn, wrapper) jobRepoFn := func() (*job.Repository, error) { - return job.NewRepository(ctx, rw, rw, kmsCache) + return job.NewRepository(rw, rw, kmsCache) } type args struct { @@ -163,7 +162,7 @@ func TestScheduler_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := New(ctx, tt.args.serverId, tt.args.jobRepo, tt.opts...) + got, err := New(tt.args.serverId, tt.args.jobRepo, tt.opts...) 
if tt.wantErr { require.Error(err) assert.Truef(errors.Match(errors.T(tt.wantErrCode), err), "Unexpected error %s", err) diff --git a/internal/scheduler/testing.go b/internal/scheduler/testing.go index 5ac4985baac..60d0fbad5f0 100644 --- a/internal/scheduler/testing.go +++ b/internal/scheduler/testing.go @@ -29,10 +29,9 @@ import ( func TestScheduler(t testing.TB, conn *db.DB, wrapper wrapping.Wrapper, opt ...Option) *Scheduler { t.Helper() - ctx := context.Background() rw := db.New(conn) kmsCache := kms.TestKms(t, conn, wrapper) - serversRepo, err := server.NewRepository(ctx, rw, rw, kmsCache) + serversRepo, err := server.NewRepository(rw, rw, kmsCache) require.NoError(t, err) iam.TestRepo(t, conn, wrapper) @@ -42,14 +41,14 @@ func TestScheduler(t testing.TB, conn *db.DB, wrapper wrapping.Wrapper, opt ...O PrivateId: "test-job-server-" + id, Address: "127.0.0.1", } - _, err = serversRepo.UpsertController(ctx, controller) + _, err = serversRepo.UpsertController(context.Background(), controller) require.NoError(t, err) jobRepoFn := func() (*job.Repository, error) { - return job.NewRepository(ctx, rw, rw, kmsCache) + return job.NewRepository(rw, rw, kmsCache) } - s, err := New(ctx, controller.PrivateId, jobRepoFn, opt...) + s, err := New(controller.PrivateId, jobRepoFn, opt...) require.NoError(t, err) return s diff --git a/internal/server/public_ids.go b/internal/server/public_ids.go index 75fe90d5aad..ff80168e605 100644 --- a/internal/server/public_ids.go +++ b/internal/server/public_ids.go @@ -12,7 +12,7 @@ import ( ) func newWorkerId(ctx context.Context) (string, error) { - id, err := db.NewPublicId(ctx, globals.WorkerPrefix) + id, err := db.NewPublicId(globals.WorkerPrefix) if err != nil { return "", errors.Wrap(ctx, err, "server.newWorkerId") } @@ -24,7 +24,7 @@ func newWorkerId(ctx context.Context) (string, error) { // upsert time. func NewWorkerIdFromScopeAndName(ctx context.Context, scope, name string) (string, error) { const op = "server.NewWorkerIdFromScopeAndName" - id, err := db.NewPublicId(ctx, globals.WorkerPrefix, db.WithPrngValues([]string{scope, name})) + id, err := db.NewPublicId(globals.WorkerPrefix, db.WithPrngValues([]string{scope, name})) if err != nil { return "", errors.Wrap(ctx, err, "server.newWorkerId") } diff --git a/internal/server/repository.go b/internal/server/repository.go index bace7728409..4c6c16c680b 100644 --- a/internal/server/repository.go +++ b/internal/server/repository.go @@ -4,7 +4,6 @@ package server import ( - "context" "reflect" "time" @@ -30,16 +29,16 @@ type Repository struct { // NewRepository creates a new server Repository. Supports the options: WithLimit // which sets a default limit on results returned by repo operations. -func NewRepository(ctx context.Context, r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { +func NewRepository(r db.Reader, w db.Writer, kms *kms.Kms, opt ...Option) (*Repository, error) { const op = "server.NewRepository" if r == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil reader") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil reader") } if w == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil writer") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil writer") } if kms == nil { - return nil, errors.New(ctx, errors.InvalidParameter, op, "nil kms") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "nil kms") } opts := GetOpts(opt...) 
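Illustrative sketch, not part of the patch: the server/repository.go hunk above drops the context argument from server.NewRepository and switches its argument checks to errors.NewDeprecated, so only per-operation calls still take a ctx. A minimal Go sketch of the resulting call pattern, assuming the usual test-harness values (t, conn, wrapper) used throughout the tests in this diff; the UpsertController call mirrors the test code changed later in the patch:

    rw := db.New(conn)                        // conn is a *db.DB from db.TestSetup
    kmsCache := kms.TestKms(t, conn, wrapper) // wrapper is a wrapping.Wrapper
    repo, err := server.NewRepository(rw, rw, kmsCache) // no ctx at construction time
    if err != nil {
        t.Fatal(err)
    }
    // Individual repository operations still take a context.
    ctx := context.Background()
    _, err = repo.UpsertController(ctx, &store.Controller{
        PrivateId: "test_controller1",
        Address:   "127.0.0.1",
    })

The job repository and scheduler constructors changed elsewhere in this diff follow the same pattern.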
diff --git a/internal/server/repository_controller_test.go b/internal/server/repository_controller_test.go
index 4976f8942dc..9e0d1944574 100644
--- a/internal/server/repository_controller_test.go
+++ b/internal/server/repository_controller_test.go
@@ -21,7 +21,7 @@ func TestRepository_UpsertController(t *testing.T) {
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
 testKms := kms.TestKms(t, conn, wrapper)
- testRepo, err := NewRepository(ctx, rw, rw, testKms)
+ testRepo, err := NewRepository(rw, rw, testKms)
 require.NoError(t, err)
 iamRepo := iam.TestRepo(t, conn, wrapper)
diff --git a/internal/server/repository_worker_test.go b/internal/server/repository_worker_test.go
index 0289874e745..89152b30b5a 100644
--- a/internal/server/repository_worker_test.go
+++ b/internal/server/repository_worker_test.go
@@ -36,12 +36,12 @@ import (
 func TestDeleteWorker(t *testing.T) {
 conn, _ := db.TestSetup(t, "postgres")
- ctx := context.Background()
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
 kms := kms.TestKms(t, conn, wrapper)
- repo, err := server.NewRepository(ctx, rw, rw, kms)
+ repo, err := server.NewRepository(rw, rw, kms)
 require.NoError(t, err)
+ ctx := context.Background()
 type args struct {
 worker *server.Worker
@@ -78,7 +78,7 @@ func TestDeleteWorker(t *testing.T) {
 args: args{
 worker: func() *server.Worker {
 w := server.Worker{Worker: &store.Worker{}}
- id, err := db.NewPublicId(ctx, "w")
+ id, err := db.NewPublicId("w")
 require.NoError(t, err)
 w.PublicId = id
 return &w
@@ -111,13 +111,13 @@ func TestDeleteWorker(t *testing.T) {
 }
 func TestLookupWorkerByName(t *testing.T) {
- ctx := context.Background()
 conn, _ := db.TestSetup(t, "postgres")
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
 kms := kms.TestKms(t, conn, wrapper)
- repo, err := server.NewRepository(ctx, rw, rw, kms)
+ repo, err := server.NewRepository(rw, rw, kms)
 require.NoError(t, err)
+ ctx := context.Background()
 w := server.TestKmsWorker(t, conn, wrapper)
 t.Run("success", func(t *testing.T) {
@@ -133,8 +133,8 @@ func TestLookupWorkerByName(t *testing.T) {
 t.Run("db error", func(t *testing.T) {
 conn, mock := db.TestSetupWithMock(t)
 rw := db.New(conn)
- mock.ExpectQuery(`SELECT`).WillReturnError(errors.New(ctx, errors.Internal, "test", "lookup-error"))
- r, err := server.NewRepository(ctx, rw, rw, kms)
+ mock.ExpectQuery(`SELECT`).WillReturnError(errors.New(context.Background(), errors.Internal, "test", "lookup-error"))
+ r, err := server.NewRepository(rw, rw, kms)
 require.NoError(t, err)
 got, err := r.LookupWorkerByName(ctx, w.GetName())
 assert.NoError(t, mock.ExpectationsWereMet())
@@ -144,14 +144,14 @@ func TestLookupWorkerByName(t *testing.T) {
 }
 func TestLookupWorkerIdByKeyId(t *testing.T) {
- ctx := context.Background()
 conn, _ := db.TestSetup(t, "postgres")
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
 kmsCache := kms.TestKms(t, conn, wrapper)
- require.NoError(t, kmsCache.CreateKeys(ctx, scope.Global.String(), kms.WithRandomReader(rand.Reader)))
- repo, err := server.NewRepository(ctx, rw, rw, kmsCache)
+ require.NoError(t, kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader)))
+ repo, err := server.NewRepository(rw, rw, kmsCache)
 require.NoError(t, err)
+ ctx := context.Background()
 var workerKeyId string
 w := server.TestPkiWorker(t, conn, wrapper, server.WithTestPkiWorkerAuthorizedKeyId(&workerKeyId))
 t.Run("success", func(t *testing.T) {
@@ -168,7 +168,7 @@ func TestLookupWorkerIdByKeyId(t *testing.T) {
 conn, mock := db.TestSetupWithMock(t)
 rw := db.New(conn)
 mock.ExpectQuery(`SELECT`).WillReturnError(errors.New(context.Background(), errors.Internal, "test", "lookup-error"))
- r, err := server.NewRepository(ctx, rw, rw, kmsCache)
+ r, err := server.NewRepository(rw, rw, kmsCache)
 require.NoError(t, err)
 got, err := r.LookupWorkerIdByKeyId(ctx, "somekey")
 assert.NoError(t, mock.ExpectationsWereMet())
@@ -178,13 +178,13 @@ func TestLookupWorkerIdByKeyId(t *testing.T) {
 }
 func TestLookupWorker(t *testing.T) {
- ctx := context.Background()
 conn, _ := db.TestSetup(t, "postgres")
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
 kms := kms.TestKms(t, conn, wrapper)
- repo, err := server.NewRepository(ctx, rw, rw, kms)
+ repo, err := server.NewRepository(rw, rw, kms)
 require.NoError(t, err)
+ ctx := context.Background()
 w := server.TestKmsWorker(t, conn, wrapper,
 server.WithName("name"),
@@ -244,7 +244,7 @@ func TestLookupWorker(t *testing.T) {
 conn, mock := db.TestSetupWithMock(t)
 rw := db.New(conn)
 mock.ExpectQuery(`SELECT`).WillReturnError(errors.New(context.Background(), errors.Internal, "test", "lookup-error"))
- r, err := server.NewRepository(ctx, rw, rw, kms)
+ r, err := server.NewRepository(rw, rw, kms)
 require.NoError(t, err)
 got, err := r.LookupWorker(ctx, w.GetPublicId())
 assert.NoError(t, mock.ExpectationsWereMet())
@@ -254,15 +254,16 @@ func TestLookupWorker(t *testing.T) {
 }
 func TestUpsertWorkerStatus(t *testing.T) {
- ctx := context.Background()
 conn, _ := db.TestSetup(t, "postgres")
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
 kmsCache := kms.TestKms(t, conn, wrapper)
 require.NoError(t, kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader)))
- repo, err := server.NewRepository(ctx, rw, rw, kmsCache)
+ repo, err := server.NewRepository(rw, rw, kmsCache)
 require.NoError(t, err)
+ ctx := context.Background()
+
 t.Run("create an initial kms worker and update status", func(t *testing.T) {
 wStatus1 := server.NewWorker(scope.Global.String(),
 server.WithAddress("address"), server.WithName("config_name1"),
@@ -419,7 +420,7 @@ func TestUpsertWorkerStatus(t *testing.T) {
 mock.ExpectBegin()
 mock.ExpectQuery(`INSERT`).WillReturnError(errors.New(context.Background(), errors.Internal, "test", "create-error"))
 mock.ExpectRollback()
- r, err := server.NewRepository(ctx, rw, rw, kmsCache)
+ r, err := server.NewRepository(rw, rw, kmsCache)
 require.NoError(t, err)
 return r
 }(),
@@ -489,14 +490,14 @@ func TestUpsertWorkerStatus(t *testing.T) {
 }
 func TestTagUpdatingListing(t *testing.T) {
- ctx := context.Background()
 require := require.New(t)
 conn, _ := db.TestSetup(t, "postgres")
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
 kms := kms.TestKms(t, conn, wrapper)
- repo, err := server.NewRepository(ctx, rw, rw, kms)
+ repo, err := server.NewRepository(rw, rw, kms)
 require.NoError(err)
+ ctx := context.Background()
 worker1 := server.TestKmsWorker(t, conn, wrapper)
 wStatus := server.NewWorker(scope.Global.String(),
@@ -545,7 +546,6 @@ func TestTagUpdatingListing(t *testing.T) {
 func TestListWorkers(t *testing.T) {
 t.Parallel()
- ctx := context.Background()
 conn, _ := db.TestSetup(t, "postgres")
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
@@ -553,8 +553,9 @@ func TestListWorkers(t *testing.T) {
 require.NoError(t, kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader)))
 const testLimit = 10
- repo, err := server.NewRepository(ctx, rw, rw, kmsCache, server.WithLimit(testLimit))
+ repo, err := server.NewRepository(rw, rw, kmsCache, server.WithLimit(testLimit))
require.NoError(t, err) + ctx := context.Background() tests := []struct { name string @@ -654,7 +655,6 @@ func TestListWorkers(t *testing.T) { func TestListWorkers_WithWorkerPool(t *testing.T) { t.Parallel() - ctx := context.Background() require := require.New(t) conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) @@ -662,8 +662,9 @@ func TestListWorkers_WithWorkerPool(t *testing.T) { kmsCache := kms.TestKms(t, conn, wrapper) require.NoError(kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader))) - serversRepo, err := server.NewRepository(ctx, rw, rw, kmsCache) + serversRepo, err := server.NewRepository(rw, rw, kmsCache) require.NoError(err) + ctx := context.Background() worker1 := server.TestKmsWorker(t, conn, wrapper) worker2 := server.TestPkiWorker(t, conn, wrapper) @@ -710,14 +711,14 @@ func TestListWorkers_WithWorkerPool(t *testing.T) { func TestListWorkers_WithActiveWorkers(t *testing.T) { t.Parallel() - ctx := context.Background() require := require.New(t) conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - serversRepo, err := server.NewRepository(ctx, rw, rw, kms) + serversRepo, err := server.NewRepository(rw, rw, kms) require.NoError(err) + ctx := context.Background() worker1 := server.TestKmsWorker(t, conn, wrapper) worker2 := server.TestKmsWorker(t, conn, wrapper) @@ -831,14 +832,14 @@ func TestListWorkers_WithActiveWorkers(t *testing.T) { func TestListWorkers_WithLiveness(t *testing.T) { t.Parallel() - ctx := context.Background() require := require.New(t) conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - serversRepo, err := server.NewRepository(ctx, rw, rw, kms) + serversRepo, err := server.NewRepository(rw, rw, kms) require.NoError(err) + ctx := context.Background() worker1 := server.TestKmsWorker(t, conn, wrapper) worker2 := server.TestKmsWorker(t, conn, wrapper) @@ -906,7 +907,7 @@ func TestRepository_CreateWorker(t *testing.T) { rw := db.New(conn) wrapper := db.TestWrapper(t) testKms := kms.TestKms(t, conn, wrapper) - testRepo, err := server.NewRepository(testCtx, rw, rw, testKms) + testRepo, err := server.NewRepository(rw, rw, testKms) require.NoError(t, err) iamRepo := iam.TestRepo(t, conn, wrapper) @@ -948,7 +949,7 @@ func TestRepository_CreateWorker(t *testing.T) { setup: func() *server.Worker { w := server.NewWorker(scope.Global.String()) var err error - w.PublicId, err = db.NewPublicId(testCtx, globals.WorkerPrefix) + w.PublicId, err = db.NewPublicId(globals.WorkerPrefix) require.NoError(t, err) return w }, @@ -1004,7 +1005,7 @@ func TestRepository_CreateWorker(t *testing.T) { mock.ExpectBegin() mock.ExpectQuery(`INSERT`).WillReturnError(errors.New(testCtx, errors.Internal, "test", "create-error")) mock.ExpectRollback() - r, err := server.NewRepository(testCtx, rw, writer, testKms) + r, err := server.NewRepository(rw, writer, testKms) require.NoError(t, err) return r }(), @@ -1060,9 +1061,9 @@ func TestRepository_CreateWorker(t *testing.T) { repo: func() *server.Repository { mockConn, mock := db.TestSetupWithMock(t) mock.ExpectQuery(`SELECT`).WillReturnRows(sqlmock.NewRows([]string{"version", "create_time"}).AddRow(migrations.Version, time.Now())) - mock.ExpectQuery(`SELECT`).WillReturnError(errors.New(testCtx, errors.Internal, "test", "no-database-key")) + mock.ExpectQuery(`SELECT`).WillReturnError(errors.New(context.Background(), errors.Internal, "test", 
"no-database-key")) k := kms.TestKms(t, mockConn, wrapper) - r, err := server.NewRepository(testCtx, rw, rw, k) + r, err := server.NewRepository(rw, rw, k) require.NoError(t, err) return r }(), @@ -1221,7 +1222,7 @@ func TestRepository_UpdateWorker(t *testing.T) { kmsCache := kms.TestKms(t, conn, wrapper) require.NoError(t, kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader))) - repo, err := server.NewRepository(ctx, rw, rw, kmsCache) + repo, err := server.NewRepository(rw, rw, kmsCache) require.NoError(t, err) pkiCases := []struct { diff --git a/internal/server/repository_workerauth.go b/internal/server/repository_workerauth.go index 21198c706e6..35a8a223e92 100644 --- a/internal/server/repository_workerauth.go +++ b/internal/server/repository_workerauth.go @@ -113,7 +113,7 @@ func (r *WorkerAuthRepositoryStorage) Store(ctx context.Context, msg nodee.Messa // * the workerAuth record is stored with a reference to a worker // * certificate bundles are stored with a reference to the workerAuth record and issuing root certificate func StoreNodeInformationTx(ctx context.Context, reader db.Reader, writer db.Writer, kmsCache *kms.Kms, scopeId string, node *types.NodeInformation, _ ...Option) error { - const op = "server.(WorkerAuthRepositoryStorage).StoreNodeInformationTx" + const op = "server.(WorkerAuthRepositoryStorage).storeNodeInformation" if isNil(reader) { return errors.New(ctx, errors.InvalidParameter, op, "missing reader") } @@ -233,23 +233,7 @@ func StoreNodeInformationTx(ctx context.Context, reader db.Reader, writer db.Wri return errors.Wrap(ctx, err, op) } - // Check if we already have a workerAuth record for this key id - nodeAuthLookup := allocWorkerAuth() - nodeAuthLookup.WorkerKeyIdentifier = node.Id - if err := reader.LookupById(ctx, nodeAuthLookup); err != nil { - switch { - case errors.IsNotFoundError(err): - // If we didn't find it, that's fine - default: - return errors.Wrap(ctx, err, op) - } - } - - // If the incoming workerAuth matches what we have stored, then we can return as it's already stored - if nodeAuth.compare(nodeAuthLookup) { - return nil - } - + // Store WorkerAuth if err := writer.Create(ctx, &nodeAuth); err != nil { return errors.Wrap(ctx, err, op) } diff --git a/internal/server/repository_workerauth_test.go b/internal/server/repository_workerauth_test.go index 9affc43111a..dffc56ee324 100644 --- a/internal/server/repository_workerauth_test.go +++ b/internal/server/repository_workerauth_test.go @@ -251,7 +251,7 @@ func TestStoreServerLedActivationToken(t *testing.T) { _, err = rotation.RotateRootCertificates(ctx, rootStorage) require.NoError(err) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) require.NoError(err) worker, err := repo.CreateWorker(ctx, &Worker{Worker: &store.Worker{ScopeId: scope.Global.String()}}, WithCreateControllerLedActivationToken(true)) require.NoError(err) @@ -351,28 +351,6 @@ func TestStoreNodeInformationTx(t *testing.T) { } return nodeInfo } - testNodeInfoFn2 := func() *types.NodeInformation { - // This happens on the worker - storage, err := inmem.New(testCtx) - require.NoError(t, err) - nodeCreds, err := types.NewNodeCredentials(testCtx, storage) - require.NoError(t, err) - - nodePubKey, err := curve25519.X25519(nodeCreds.EncryptionPrivateKeyBytes, curve25519.Basepoint) - require.NoError(t, err) - // Add in node information to storage so we have a key to use - nodeInfo := &types.NodeInformation{ - Id: "fake-secondary-key-id", - 
CertificatePublicKeyPkix: nodeCreds.CertificatePublicKeyPkix, - CertificatePublicKeyType: nodeCreds.CertificatePrivateKeyType, - EncryptionPublicKeyBytes: nodePubKey, - EncryptionPublicKeyType: nodeCreds.EncryptionPrivateKeyType, - ServerEncryptionPrivateKeyBytes: []byte("whatever"), - RegistrationNonce: nodeCreds.RegistrationNonce, - State: testState, - } - return nodeInfo - } // For swapping out key ID for wrapping registration flow wrappingRegFlowStorage, err := inmem.New(testCtx) @@ -396,20 +374,15 @@ func TestStoreNodeInformationTx(t *testing.T) { } tests := []struct { - name string - reader db.Reader - writer db.Writer - scope string - kms *kms.Kms - node *types.NodeInformation - wantErr bool - wantErrIs errors.Code - wantErrContains string - storeTwice bool - secondStoreDifferentNode bool - wantSecondStoreErr bool - wantSecondStoreErrIs errors.Code - wantSecondStoreErrContains string + name string + reader db.Reader + writer db.Writer + scope string + kms *kms.Kms + node *types.NodeInformation + wantErr bool + wantErrIs errors.Code + wantErrContains string }{ { name: "missing-writer", @@ -522,28 +495,12 @@ func TestStoreNodeInformationTx(t *testing.T) { wantErrContains: "in wrapping registration flow but boundary version not provided", }, { - name: "success", - reader: rw, - writer: rw, - scope: scope.Global.String(), - kms: kmsCache, - node: testNodeInfoFn(), - storeTwice: true, - }, - { - // This test will fail because on the second store we change the incoming NodeInformation - // so that it does not match the already inserted record - name: "fail-store-twice-different-node-info", - reader: rw, - writer: rw, - scope: scope.Global.String(), - kms: kmsCache, - node: testNodeInfoFn2(), - storeTwice: true, - secondStoreDifferentNode: true, - wantSecondStoreErr: true, - wantSecondStoreErrIs: errors.NotUnique, - wantSecondStoreErrContains: "server.(WorkerAuthRepositoryStorage).StoreNodeInformationTx: db.Create: duplicate key value violates unique constraint \"worker_auth_authorized_pkey\": unique constraint violation: integrity violation: error #1002", + name: "success", + reader: rw, + writer: rw, + scope: scope.Global.String(), + kms: kmsCache, + node: testNodeInfoFn(), }, { name: "success-wrapflow", @@ -576,29 +533,6 @@ func TestStoreNodeInformationTx(t *testing.T) { return } require.NoError(err) - // Try to store the "same" node information twice - if tc.storeTwice { - node := tc.node - if tc.secondStoreDifferentNode { - storage, err := inmem.New(testCtx) - require.NoError(err) - nodeCreds, err := types.NewNodeCredentials(testCtx, storage) - require.NoError(err) - node.CertificatePublicKeyPkix = nodeCreds.CertificatePublicKeyPkix - } - err = StoreNodeInformationTx(testCtx, tc.reader, tc.writer, tc.kms, tc.scope, node) - if tc.wantSecondStoreErr { - require.Error(err) - if tc.wantSecondStoreErrIs != errors.Unknown { - assert.True(errors.Match(errors.T(tc.wantSecondStoreErrIs), err)) - } - if tc.wantSecondStoreErrContains != "" { - assert.Contains(err.Error(), tc.wantSecondStoreErrContains) - } - return - } - require.NoError(err) - } }) } } @@ -647,7 +581,7 @@ func TestFilterToAuthorizedWorkerKeyIds(t *testing.T) { assert.NoError(t, err) assert.ElementsMatch(t, []string{keyId1, keyId2}, got) - workerRepo, err := NewRepository(ctx, rw, rw, kmsCache) + workerRepo, err := NewRepository(rw, rw, kmsCache) require.NoError(t, err) _, err = workerRepo.DeleteWorker(ctx, w1.GetPublicId()) require.NoError(t, err) diff --git a/internal/server/testing.go b/internal/server/testing.go index 
a47df6c5594..4eb147c8e38 100644 --- a/internal/server/testing.go +++ b/internal/server/testing.go @@ -97,11 +97,11 @@ func TestWorkerAuth(t *testing.T, conn *db.DB, worker *Worker, kmsWrapper wrappi // random name will be generated and assigned to the worker. func TestKmsWorker(t *testing.T, conn *db.DB, wrapper wrapping.Wrapper, opt ...Option) *Worker { t.Helper() - ctx := context.Background() rw := db.New(conn) kms := kms.TestKms(t, conn, wrapper) - serversRepo, err := NewRepository(ctx, rw, rw, kms) + serversRepo, err := NewRepository(rw, rw, kms) require.NoError(t, err) + ctx := context.Background() opts := GetOpts(opt...) if opts.withName == "" { @@ -150,11 +150,11 @@ func TestKmsWorker(t *testing.T, conn *db.DB, wrapper wrapping.Wrapper, opt ...O // passed to WithTestPkiWorkerAuthorizedKeyId is set to the key id. func TestPkiWorker(t *testing.T, conn *db.DB, wrapper wrapping.Wrapper, opt ...Option) *Worker { t.Helper() - ctx := context.Background() rw := db.New(conn) kmsCache := kms.TestKms(t, conn, wrapper) - serversRepo, err := NewRepository(ctx, rw, rw, kmsCache) + serversRepo, err := NewRepository(rw, rw, kmsCache) require.NoError(t, err) + ctx := context.Background() opts := GetOpts(opt...) require.NoError(t, err) diff --git a/internal/server/worker_auth.go b/internal/server/worker_auth.go index db7f8b64733..5efdffa71fe 100644 --- a/internal/server/worker_auth.go +++ b/internal/server/worker_auth.go @@ -4,7 +4,6 @@ package server import ( - "bytes" "context" "github.com/hashicorp/boundary/internal/errors" @@ -51,21 +50,6 @@ func (w *WorkerAuth) decrypt(ctx context.Context, cipher wrapping.Wrapper) error return nil } -func (w *WorkerAuth) compare(other *WorkerAuth) bool { - switch { - case w.WorkerId != other.WorkerId: - return false - case !bytes.Equal(w.WorkerEncryptionPubKey, other.WorkerEncryptionPubKey): - return false - case !bytes.Equal(w.WorkerSigningPubKey, other.WorkerSigningPubKey): - return false - case !bytes.Equal(w.Nonce, other.Nonce): - return false - default: - return true - } -} - // WorkerAuthSet is intended to store a set of WorkerAuth records // This set represents the current and previous WorkerAuth records for a worker type WorkerAuthSet struct { diff --git a/internal/server/worker_auth_server_led_activation_token_test.go b/internal/server/worker_auth_server_led_activation_token_test.go index b37100c78b8..4fb7a79afb7 100644 --- a/internal/server/worker_auth_server_led_activation_token_test.go +++ b/internal/server/worker_auth_server_led_activation_token_test.go @@ -27,7 +27,7 @@ func TestWorkerAuthActivationTokenConstraints(t *testing.T) { kmsCache := kms.TestKms(t, conn, wrap) tlRequire.NoError(kmsCache.CreateKeys(context.Background(), scope.Global.String(), kms.WithRandomReader(rand.Reader))) - repo, err := NewRepository(ctx, rw, rw, kmsCache) + repo, err := NewRepository(rw, rw, kmsCache) tlRequire.NoError(err) // First create a worker without an activation token so we can verify it doesn't show up in the table diff --git a/internal/server/worker_tags_test.go b/internal/server/worker_tags_test.go index c23633e2081..e78d08de6dd 100644 --- a/internal/server/worker_tags_test.go +++ b/internal/server/worker_tags_test.go @@ -115,14 +115,13 @@ func TestWorkerTags_Create(t *testing.T) { func TestRepository_AddWorkerTags(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) assert, require := assert.New(t), require.New(t) - repo, err 
:= NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) // WithWorkerTags sets config tags to ensure they are not affected by api tag operations @@ -289,7 +288,6 @@ func TestRepository_AddWorkerTags(t *testing.T) { func TestRepository_SetWorkerTags(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -297,7 +295,7 @@ func TestRepository_SetWorkerTags(t *testing.T) { worker := TestKmsWorker(t, conn, wrapper, WithWorkerTags(&Tag{Key: "key_c", Value: "value_c"})) assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -446,7 +444,6 @@ func TestRepository_SetWorkerTags(t *testing.T) { func TestRepository_DeleteWorkerTags(t *testing.T) { // Note: more delete operation testcases are found in subsequent func TestRepository_WorkerTagsConsequent t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -454,7 +451,7 @@ func TestRepository_DeleteWorkerTags(t *testing.T) { worker := TestKmsWorker(t, conn, wrapper) assert, require := assert.New(t), require.New(t) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) @@ -599,13 +596,12 @@ func TestRepository_DeleteWorkerTags(t *testing.T) { func TestRepository_WorkerTagsConsequent(t *testing.T) { t.Parallel() - ctx := context.Background() assert, require := assert.New(t), require.New(t) conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) kms := kms.TestKms(t, conn, wrapper) - repo, err := NewRepository(ctx, rw, rw, kms) + repo, err := NewRepository(rw, rw, kms) require.NoError(err) require.NotNil(repo) diff --git a/internal/session/connection.go b/internal/session/connection.go index 14662d68ad3..457fbdf1f2c 100644 --- a/internal/session/connection.go +++ b/internal/session/connection.go @@ -59,7 +59,7 @@ var ( // NewConnection creates a new in memory connection. No options // are currently supported. 
-func NewConnection(ctx context.Context, sessionID, clientTcpAddress string, clientTcpPort uint32, endpointTcpAddr string, endpointTcpPort uint32, userClientIp string, _ ...Option) (*Connection, error) {
+func NewConnection(sessionID, clientTcpAddress string, clientTcpPort uint32, endpointTcpAddr string, endpointTcpPort uint32, userClientIp string, _ ...Option) (*Connection, error) {
 const op = "session.NewConnection"
 c := Connection{
 SessionId: sessionID,
@@ -69,8 +69,8 @@ func NewConnection(ctx context.Context, sessionID, clientTcpAddress string, clie
 EndpointTcpPort: endpointTcpPort,
 UserClientIp: userClientIp,
 }
- if err := c.validateNewConnection(ctx); err != nil {
- return nil, errors.Wrap(ctx, err, op)
+ if err := c.validateNewConnection(); err != nil {
+ return nil, errors.WrapDeprecated(err, op)
 }
 return &c, nil
 }
@@ -124,7 +124,7 @@ func (c *Connection) VetForWrite(ctx context.Context, _ db.Reader, opType db.OpT
 }
 switch opType {
 case db.CreateOp:
- if err := c.validateNewConnection(ctx); err != nil {
+ if err := c.validateNewConnection(); err != nil {
 return errors.Wrap(ctx, err, op)
 }
 case db.UpdateOp:
@@ -138,7 +138,7 @@ func (c *Connection) VetForWrite(ctx context.Context, _ db.Reader, opType db.OpT
 case contains(opts.WithFieldMaskPaths, "UpdateTime"):
 return errors.New(ctx, errors.InvalidParameter, op, "update time is immutable")
 case contains(opts.WithFieldMaskPaths, "ClosedReason"):
- if _, err := convertToClosedReason(ctx, c.ClosedReason); err != nil {
+ if _, err := convertToClosedReason(c.ClosedReason); err != nil {
 return errors.Wrap(ctx, err, op)
 }
 }
@@ -162,25 +162,25 @@ func (c *Connection) SetTableName(n string) {
 }
 // validateNewConnection checks everything but the connection's PublicId
-func (c *Connection) validateNewConnection(ctx context.Context) error {
+func (c *Connection) validateNewConnection() error {
 const op = "session.(Connection).validateNewConnection"
 if c.SessionId == "" {
- return errors.New(ctx, errors.InvalidParameter, op, "missing session id")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing session id")
 }
 if c.ClientTcpAddress == "" {
- return errors.New(ctx, errors.InvalidParameter, op, "missing client address")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing client address")
 }
 if c.ClientTcpPort == 0 {
- return errors.New(ctx, errors.InvalidParameter, op, "missing client port")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing client port")
 }
 if c.EndpointTcpAddress == "" {
- return errors.New(ctx, errors.InvalidParameter, op, "missing endpoint address")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing endpoint address")
 }
 if c.EndpointTcpPort == 0 {
- return errors.New(ctx, errors.InvalidParameter, op, "missing endpoint port")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing endpoint port")
 }
 if c.UserClientIp == "" {
- return errors.New(ctx, errors.InvalidParameter, op, "missing user client ip")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing user client ip")
 }
 return nil
 }
diff --git a/internal/session/connection_closed_reason.go b/internal/session/connection_closed_reason.go
index 357c4dae1fc..32950de2528 100644
--- a/internal/session/connection_closed_reason.go
+++ b/internal/session/connection_closed_reason.go
@@ -4,7 +4,6 @@ package session
 import (
- "context"
 "fmt"
 "github.com/hashicorp/boundary/internal/errors"
@@ -27,7 +26,7 @@ func (r ClosedReason) String() string {
 return string(r)
 }
-func convertToClosedReason(ctx context.Context, s string) (ClosedReason, error) {
+func convertToClosedReason(s string) (ClosedReason, error) {
 const op = "session.convertToClosedReason"
 switch s {
 case UnknownReason.String():
@@ -43,6 +42,6 @@ func convertToClosedReason(ctx context.Context, s string) (ClosedReason, error)
 case ConnectionSystemError.String():
 return ConnectionSystemError, nil
 default:
- return "", errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("%s is not a valid reason", s))
+ return "", errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("%s is not a valid reason", s))
 }
 }
diff --git a/internal/session/connection_closed_with.go b/internal/session/connection_closed_with.go
index 227ad02e231..029e8e75b3a 100644
--- a/internal/session/connection_closed_with.go
+++ b/internal/session/connection_closed_with.go
@@ -4,8 +4,6 @@ package session
 import (
- "context"
-
 "github.com/hashicorp/boundary/internal/errors"
 )
@@ -18,13 +16,13 @@ type CloseWith struct {
 ClosedReason ClosedReason
 }
-func (c CloseWith) validate(ctx context.Context) error {
+func (c CloseWith) validate() error {
 const op = "session.(CloseWith).validate"
 if c.ConnectionId == "" {
- return errors.New(ctx, errors.InvalidParameter, op, "missing connection id")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing connection id")
 }
 if c.ClosedReason.String() == "" {
- return errors.New(ctx, errors.InvalidParameter, op, "missing closed reason")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing closed reason")
 }
 // 0 is valid for BytesUp and BytesDown
 return nil
diff --git a/internal/session/connection_closed_with_test.go b/internal/session/connection_closed_with_test.go
index 811f783d2e9..05897d43f91 100644
--- a/internal/session/connection_closed_with_test.go
+++ b/internal/session/connection_closed_with_test.go
@@ -4,7 +4,6 @@ package session
 import (
- "context"
 "testing"
 "github.com/hashicorp/boundary/internal/db"
@@ -64,7 +63,7 @@ func TestClosedWith_validate(t *testing.T) {
 BytesDown: tt.fields.BytesDown,
 ClosedReason: tt.fields.ClosedReason,
 }
- if err := c.validate(context.Background()); (err != nil) != tt.wantErr {
+ if err := c.validate(); (err != nil) != tt.wantErr {
 t.Errorf("ClosedWith.validate() error = %v, wantErr %v", err, tt.wantErr)
 }
 })
diff --git a/internal/session/connection_state.go b/internal/session/connection_state.go
index 88c2e9c655b..1a86e02f6e3 100644
--- a/internal/session/connection_state.go
+++ b/internal/session/connection_state.go
@@ -83,14 +83,14 @@ var (
 // NewConnectionState creates a new in memory connection state. No options
 // are currently supported.
-func NewConnectionState(ctx context.Context, connectionId string, state ConnectionStatus, _ ...Option) (*ConnectionState, error) {
+func NewConnectionState(connectionId string, state ConnectionStatus, _ ...Option) (*ConnectionState, error) {
 const op = "session.NewConnectionState"
 s := ConnectionState{
 ConnectionId: connectionId,
 Status: state,
 }
- if err := s.validate(ctx); err != nil {
- return nil, errors.Wrap(ctx, err, op)
+ if err := s.validate(); err != nil {
+ return nil, errors.WrapDeprecated(err, op)
 }
 return &s, nil
 }
@@ -138,7 +138,7 @@ func (s *ConnectionState) Clone() any {
 // before it's written.
 func (s *ConnectionState) VetForWrite(ctx context.Context, _ db.Reader, _ db.OpType, _ ...db.Option) error {
 const op = "session.(ConnectionState).VetForWrite"
- if err := s.validate(ctx); err != nil {
+ if err := s.validate(); err != nil {
 return errors.Wrap(ctx, err, op)
 }
 return nil
@@ -160,22 +160,22 @@ func (s *ConnectionState) SetTableName(n string) {
 }
 // validate checks the session state
-func (s *ConnectionState) validate(ctx context.Context) error {
+func (s *ConnectionState) validate() error {
 const op = "session.(ConnectionState).validate"
 if s.Status == "" {
- return errors.New(ctx, errors.InvalidParameter, op, "missing status")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing status")
 }
 if s.ConnectionId == "" {
- return errors.New(ctx, errors.InvalidParameter, op, "missing connection id")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "missing connection id")
 }
 if s.StartTime != nil {
- return errors.New(ctx, errors.InvalidParameter, op, "start time is not settable")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "start time is not settable")
 }
 if s.EndTime != nil {
- return errors.New(ctx, errors.InvalidParameter, op, "end time is not settable")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "end time is not settable")
 }
 if s.PreviousEndTime != nil {
- return errors.New(ctx, errors.InvalidParameter, op, "previous end time is not settable")
+ return errors.NewDeprecated(errors.InvalidParameter, op, "previous end time is not settable")
 }
 return nil
 }
diff --git a/internal/session/connection_state_test.go b/internal/session/connection_state_test.go
index 7f8336ff290..7b37ed18aad 100644
--- a/internal/session/connection_state_test.go
+++ b/internal/session/connection_state_test.go
@@ -16,7 +16,6 @@ import (
 func TestConnectionState_Create(t *testing.T) {
 t.Parallel()
- ctx := context.Background()
 conn, _ := db.TestSetup(t, "postgres")
 wrapper := db.TestWrapper(t)
 iamRepo := iam.TestRepo(t, conn, wrapper)
@@ -68,7 +67,7 @@ func TestConnectionState_Create(t *testing.T) {
 for _, tt := range tests {
 t.Run(tt.name, func(t *testing.T) {
 assert, require := assert.New(t), require.New(t)
- got, err := NewConnectionState(ctx, tt.args.connectionId, tt.args.status)
+ got, err := NewConnectionState(tt.args.connectionId, tt.args.status)
 if tt.wantErr {
 require.Error(err)
 assert.True(errors.Match(errors.T(tt.wantIsErr), err))
@@ -77,7 +76,7 @@ func TestConnectionState_Create(t *testing.T) {
 require.NoError(err)
 assert.Equal(tt.want, got)
 if tt.create {
- err = db.New(conn).Create(ctx, got)
+ err = db.New(conn).Create(context.Background(), got)
 if tt.wantCreateErr {
 assert.Error(err)
 return
@@ -91,7 +90,6 @@ func TestConnectionState_Create(t *testing.T) {
 func TestConnectionState_Delete(t *testing.T) {
 t.Parallel()
- ctx := context.Background()
 conn, _ := db.TestSetup(t, "postgres")
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
@@ -118,7 +116,7 @@ func TestConnectionState_Delete(t *testing.T) {
 name: "bad-id",
 state: TestConnectionState(t, conn, c2.PublicId, StatusClosed),
 deleteConnectionStateId: func() string {
- id, err := db.NewPublicId(ctx, ConnectionStatePrefix)
+ id, err := db.NewPublicId(ConnectionStatePrefix)
 require.NoError(t, err)
 return id
 }(),
@@ -141,7 +139,7 @@ func TestConnectionState_Delete(t *testing.T) {
 deleteState.ConnectionId = tt.state.ConnectionId
 }
 deleteState.StartTime = initialState.StartTime
- deletedRows, err := rw.Delete(ctx, &deleteState)
+ deletedRows, err := rw.Delete(context.Background(), &deleteState)
 if tt.wantErr {
 require.Error(err)
 return
@@ -153,7 +151,7 @@ func TestConnectionState_Delete(t *testing.T) {
 }
 assert.Equal(tt.wantRowsDeleted, deletedRows)
 foundState := allocConnectionState()
- err = rw.LookupWhere(ctx, &foundState, "connection_id = ? and start_time = ?", []any{tt.state.ConnectionId, initialState.StartTime})
+ err = rw.LookupWhere(context.Background(), &foundState, "connection_id = ? and start_time = ?", []any{tt.state.ConnectionId, initialState.StartTime})
 require.Error(err)
 assert.True(errors.IsNotFoundError(err))
 })
diff --git a/internal/session/connection_test.go b/internal/session/connection_test.go
index ee31b0799a7..832ac39e252 100644
--- a/internal/session/connection_test.go
+++ b/internal/session/connection_test.go
@@ -16,7 +16,6 @@ import (
 func TestConnection_Create(t *testing.T) {
 t.Parallel()
- ctx := context.Background()
 conn, _ := db.TestSetup(t, "postgres")
 wrapper := db.TestWrapper(t)
 iamRepo := iam.TestRepo(t, conn, wrapper)
@@ -136,7 +135,6 @@ func TestConnection_Create(t *testing.T) {
 t.Run(tt.name, func(t *testing.T) {
 assert, require := assert.New(t), require.New(t)
 got, err := NewConnection(
- ctx,
 tt.args.sessionId,
 tt.args.clientTcpAddress,
 tt.args.clientTcpPort,
@@ -152,10 +150,10 @@ func TestConnection_Create(t *testing.T) {
 require.NoError(err)
 assert.Equal(tt.want, got)
 if tt.create {
- id, err := db.NewPublicId(ctx, ConnectionPrefix)
+ id, err := db.NewPublicId(ConnectionPrefix)
 require.NoError(err)
 got.PublicId = id
- err = db.New(conn).Create(ctx, got)
+ err = db.New(conn).Create(context.Background(), got)
 if tt.wantCreateErr {
 assert.Error(err)
 return
@@ -169,7 +167,6 @@ func TestConnection_Create(t *testing.T) {
 func TestConnection_Delete(t *testing.T) {
 t.Parallel()
- ctx := context.Background()
 conn, _ := db.TestSetup(t, "postgres")
 rw := db.New(conn)
 wrapper := db.TestWrapper(t)
@@ -193,7 +190,7 @@ func TestConnection_Delete(t *testing.T) {
 name: "bad-id",
 connection: func() *Connection {
 c := AllocConnection()
- id, err := db.NewPublicId(ctx, ConnectionPrefix)
+ id, err := db.NewPublicId(ConnectionPrefix)
 require.NoError(t, err)
 c.PublicId = id
 return &c
diff --git a/internal/session/ids.go b/internal/session/ids.go
index 40a4beead94..227cdf2d2b4 100644
--- a/internal/session/ids.go
+++ b/internal/session/ids.go
@@ -4,8 +4,6 @@ package session
 import (
- "context"
-
 "github.com/hashicorp/boundary/globals"
 "github.com/hashicorp/boundary/internal/db"
 "github.com/hashicorp/boundary/internal/errors"
@@ -22,38 +20,38 @@ const (
 ConnectionStatePrefix = "scs"
 )
-func newId(ctx context.Context) (string, error) {
+func newId() (string, error) {
 const op = "session.newId"
- id, err := db.NewPublicId(ctx, globals.SessionPrefix)
+ id, err := db.NewPublicId(globals.SessionPrefix)
 if err != nil {
- return "", errors.Wrap(ctx, err, op)
+ return "", errors.WrapDeprecated(err, op)
 }
 return id, nil
 }
-func newStateId(ctx context.Context) (string, error) {
+func newStateId() (string, error) {
 const op = "session.newStateId"
- id, err := db.NewPublicId(ctx, StatePrefix)
+ id, err := db.NewPublicId(StatePrefix)
 if err != nil {
- return "", errors.Wrap(ctx, err, op)
+ return "", errors.WrapDeprecated(err, op)
 }
 return id, nil
 }
-func newConnectionId(ctx context.Context) (string, error) {
+func newConnectionId() (string, error) {
 const op = "session.newConnectionId"
- id, err := db.NewPublicId(ctx, ConnectionPrefix)
+ id, err := db.NewPublicId(ConnectionPrefix)
 if err != nil {
- return "", errors.Wrap(ctx, err, op)
+ return "", errors.WrapDeprecated(err, op)
 }
 return id, nil
 }
-func newConnectionStateId(ctx context.Context) (string, error) {
+func newConnectionStateId() (string, error) {
 const op = "session.newConnectionStateId"
- id, err := db.NewPublicId(ctx, ConnectionStatePrefix)
+ id, err := db.NewPublicId(ConnectionStatePrefix)
 if err != nil {
- return "", errors.Wrap(ctx, err, op)
+ return "", errors.WrapDeprecated(err, op)
 }
 return id, nil
 }
diff --git a/internal/session/ids_test.go b/internal/session/ids_test.go
index 318b7a30b7a..0e681dbea1a 100644
--- a/internal/session/ids_test.go
+++ b/internal/session/ids_test.go
@@ -4,7 +4,6 @@ package session
 import (
- "context"
 "strings"
 "testing"
@@ -15,24 +14,23 @@ import (
 func Test_Ids(t *testing.T) {
 t.Parallel()
- ctx := context.Background()
 t.Run("s", func(t *testing.T) {
- id, err := newId(ctx)
+ id, err := newId()
 require.NoError(t, err)
 assert.True(t, strings.HasPrefix(id, globals.SessionPrefix+"_"))
 })
 t.Run("ss", func(t *testing.T) {
- id, err := newStateId(ctx)
+ id, err := newStateId()
 require.NoError(t, err)
 assert.True(t, strings.HasPrefix(id, StatePrefix+"_"))
 })
 t.Run("sc", func(t *testing.T) {
- id, err := newConnectionId(ctx)
+ id, err := newConnectionId()
 require.NoError(t, err)
 assert.True(t, strings.HasPrefix(id, ConnectionPrefix+"_"))
 })
 t.Run("scs", func(t *testing.T) {
- id, err := newConnectionStateId(ctx)
+ id, err := newConnectionStateId()
 require.NoError(t, err)
 assert.True(t, strings.HasPrefix(id, ConnectionStatePrefix+"_"))
 })
diff --git a/internal/session/job_session_cleanup.go b/internal/session/job_session_cleanup.go
index e4ede8d2cf9..a37cc5d296e 100644
--- a/internal/session/job_session_cleanup.go
+++ b/internal/session/job_session_cleanup.go
@@ -45,18 +45,17 @@ type sessionConnectionCleanupJob struct {
 // newSessionConnectionCleanupJob instantiates the session cleanup job.
 func newSessionConnectionCleanupJob(
- ctx context.Context,
 writer db.Writer,
 gracePeriod *atomic.Int64,
 ) (*sessionConnectionCleanupJob, error) {
 const op = "session.newNewSessionConnectionCleanupJob"
 switch {
 case writer == nil:
- return nil, errors.New(ctx, errors.InvalidParameter, op, "missing db writer")
+ return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing db writer")
 case gracePeriod == nil:
- return nil, errors.New(ctx, errors.InvalidParameter, op, "missing grace period")
+ return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing grace period")
 case gracePeriod.Load() == 0:
- return nil, errors.New(ctx, errors.InvalidParameter, op, "grace period is zero")
+ return nil, errors.NewDeprecated(errors.InvalidParameter, op, "grace period is zero")
 }
 return &sessionConnectionCleanupJob{
diff --git a/internal/session/job_session_cleanup_test.go b/internal/session/job_session_cleanup_test.go
index 68a1182e09c..5027d1f01a5 100644
--- a/internal/session/job_session_cleanup_test.go
+++ b/internal/session/job_session_cleanup_test.go
@@ -40,7 +40,7 @@ func TestSessionConnectionCleanupJob(t *testing.T) {
 wrapper := db.TestWrapper(t)
 iamRepo := iam.TestRepo(t, conn, wrapper)
 kms := kms.TestKms(t, conn, wrapper)
- serversRepo, err := server.NewRepository(ctx, rw, rw, kms)
+ serversRepo, err := server.NewRepository(rw, rw, kms)
 require.NoError(err)
 sessionRepo, err := NewRepository(ctx, rw, rw, kms)
 require.NoError(err)
@@ -106,7 +106,7 @@ func TestSessionConnectionCleanupJob(t *testing.T) {
 }
 // Create the job.
- job, err := newSessionConnectionCleanupJob(ctx, rw, gracePeriod) + job, err := newSessionConnectionCleanupJob(rw, gracePeriod) job.gracePeriod = gracePeriod // by-pass factory assert so we dont have to wait so long require.NoError(err) @@ -151,14 +151,14 @@ func TestSessionConnectionCleanupJob(t *testing.T) { func TestSessionConnectionCleanupJobNewJobErr(t *testing.T) { t.Parallel() - ctx := context.Background() + ctx := context.TODO() const op = "session.newNewSessionConnectionCleanupJob" require := require.New(t) grace := new(atomic.Int64) grace.Store(1000000) - job, err := newSessionConnectionCleanupJob(ctx, nil, grace) + job, err := newSessionConnectionCleanupJob(nil, grace) require.Equal(err, errors.E( ctx, errors.WithCode(errors.InvalidParameter), @@ -170,7 +170,7 @@ func TestSessionConnectionCleanupJobNewJobErr(t *testing.T) { conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) - job, err = newSessionConnectionCleanupJob(ctx, rw, nil) + job, err = newSessionConnectionCleanupJob(rw, nil) require.Equal(err, errors.E( ctx, errors.WithCode(errors.InvalidParameter), @@ -179,7 +179,7 @@ func TestSessionConnectionCleanupJobNewJobErr(t *testing.T) { )) require.Nil(job) - job, err = newSessionConnectionCleanupJob(ctx, rw, new(atomic.Int64)) + job, err = newSessionConnectionCleanupJob(rw, new(atomic.Int64)) require.Equal(err, errors.E( ctx, errors.WithCode(errors.InvalidParameter), @@ -203,13 +203,13 @@ func TestCloseConnectionsForDeadWorkers(t *testing.T) { gracePeriod := 1 * time.Second connRepo, err := NewConnectionRepository(ctx, rw, rw, kms) require.NoError(err) - serversRepo, err := server.NewRepository(ctx, rw, rw, kms) + serversRepo, err := server.NewRepository(rw, rw, kms) require.NoError(err) defaultLiveness := new(atomic.Int64) defaultLiveness.Store(int64(server.DefaultLiveness)) - job, err := newSessionConnectionCleanupJob(ctx, rw, defaultLiveness) + job, err := newSessionConnectionCleanupJob(rw, defaultLiveness) require.NoError(err) // connection count = 6 * states(authorized, connected, closed = 3) * servers_with_open_connections(3) @@ -472,7 +472,7 @@ func TestCloseWorkerlessConnections(t *testing.T) { hourDuration := new(atomic.Int64) hourDuration.Store(int64(time.Hour)) - job, err := newSessionConnectionCleanupJob(ctx, rw, hourDuration) + job, err := newSessionConnectionCleanupJob(rw, hourDuration) require.NoError(err) createConnection := func(workerId string) *Connection { diff --git a/internal/session/jobs.go b/internal/session/jobs.go index 313de4fbd0b..a8b37ce096c 100644 --- a/internal/session/jobs.go +++ b/internal/session/jobs.go @@ -25,7 +25,7 @@ func RegisterJobs(ctx context.Context, scheduler *scheduler.Scheduler, w db.Writ return errors.New(ctx, errors.InvalidParameter, op, "nil grace period") } - sessionConnectionCleanupJob, err := newSessionConnectionCleanupJob(ctx, w, gracePeriod) + sessionConnectionCleanupJob, err := newSessionConnectionCleanupJob(w, gracePeriod) if err != nil { return fmt.Errorf("error creating session cleanup job: %w", err) } diff --git a/internal/session/repository_connection.go b/internal/session/repository_connection.go index 4efd8b9c0b7..474989fc127 100644 --- a/internal/session/repository_connection.go +++ b/internal/session/repository_connection.go @@ -139,7 +139,7 @@ func (r *ConnectionRepository) AuthorizeConnection(ctx context.Context, sessionI if sessionId == "" { return nil, nil, errors.Wrap(ctx, status.Error(codes.FailedPrecondition, "missing session id"), op, errors.WithCode(errors.InvalidParameter)) } - connectionId, err 
:= newConnectionId(ctx) + connectionId, err := newConnectionId() if err != nil { return nil, nil, errors.Wrap(ctx, err, op) } @@ -234,7 +234,7 @@ func (r *ConnectionRepository) ListConnectionsBySessionId(ctx context.Context, s func (r *ConnectionRepository) ConnectConnection(ctx context.Context, c ConnectWith) (*Connection, []*ConnectionState, error) { const op = "session.(ConnectionRepository).ConnectConnection" // ConnectWith.validate will check all the fields... - if err := c.validate(ctx); err != nil { + if err := c.validate(); err != nil { return nil, nil, errors.Wrap(ctx, err, op) } var connection Connection @@ -266,7 +266,7 @@ func (r *ConnectionRepository) ConnectConnection(ctx context.Context, c ConnectW // return err, which will result in a rollback of the update return errors.New(ctx, errors.MultipleRecords, op, "more than 1 resource would have been updated") } - newState, err := NewConnectionState(ctx, connection.PublicId, StatusConnected) + newState, err := NewConnectionState(connection.PublicId, StatusConnected) if err != nil { return errors.Wrap(ctx, err, op) } @@ -302,7 +302,7 @@ func (r *ConnectionRepository) closeConnections(ctx context.Context, closeWith [ return nil, errors.New(ctx, errors.InvalidParameter, op, "missing connections") } for _, cw := range closeWith { - if err := cw.validate(ctx); err != nil { + if err := cw.validate(); err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg(fmt.Sprintf("%s was invalid", cw.ConnectionId))) } } diff --git a/internal/session/repository_connection_test.go b/internal/session/repository_connection_test.go index a12e9c83e71..3321987ee82 100644 --- a/internal/session/repository_connection_test.go +++ b/internal/session/repository_connection_test.go @@ -310,7 +310,7 @@ func TestRepository_DeleteConnection(t *testing.T) { args: args{ connection: func() *Connection { c := AllocConnection() - id, err := newConnectionId(ctx) + id, err := newConnectionId() require.NoError(t, err) c.PublicId = id return &c @@ -580,7 +580,7 @@ func TestUpdateBytesUpDown(t *testing.T) { cws := make([]CloseWith, 0, len(conns)) for i := 0; i < len(conns); i++ { conns[i].ClosedReason = closeReasons[rand.Intn(len(closeReasons))].String() - cr, err := convertToClosedReason(ctx, conns[i].ClosedReason) + cr, err := convertToClosedReason(conns[i].ClosedReason) require.NoError(t, err) cws = append(cws, CloseWith{ diff --git a/internal/session/repository_session.go b/internal/session/repository_session.go index 07ad00a522b..5f6860b4ca7 100644 --- a/internal/session/repository_session.go +++ b/internal/session/repository_session.go @@ -63,7 +63,7 @@ func (r *Repository) CreateSession(ctx context.Context, sessionWrapper wrapping. return nil, errors.New(ctx, errors.InvalidParameter, op, "missing addresses") } - id, err := newId(ctx) + id, err := newId() if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -93,7 +93,7 @@ func (r *Repository) CreateSession(ctx context.Context, sessionWrapper wrapping. } if newSession.HostSetId != "" && newSession.HostId != "" { - hs, err := NewSessionHostSetHost(ctx, newSession.PublicId, newSession.HostSetId, newSession.HostId) + hs, err := NewSessionHostSetHost(newSession.PublicId, newSession.HostSetId, newSession.HostId) if err != nil { return errors.Wrap(ctx, err, op) } @@ -103,7 +103,7 @@ func (r *Repository) CreateSession(ctx context.Context, sessionWrapper wrapping. 
returnedSession.HostSetId = hs.HostSetId returnedSession.HostId = hs.HostId } else if newSession.Endpoint != "" { - ta, err := NewSessionTargetAddress(ctx, newSession.PublicId, newSession.TargetId) + ta, err := NewSessionTargetAddress(newSession.PublicId, newSession.TargetId) if err != nil { return errors.Wrap(ctx, err, op) } diff --git a/internal/session/repository_session_test.go b/internal/session/repository_session_test.go index c87a71e0589..c0949055354 100644 --- a/internal/session/repository_session_test.go +++ b/internal/session/repository_session_test.go @@ -1033,7 +1033,7 @@ func TestRepository_CancelSession(t *testing.T) { acct := password.TestAccount(t, conn, authMethod.GetPublicId(), "name1") user := iam.TestUser(t, iamRepo, org.PublicId, iam.WithAccountIds(acct.PublicId)) - authTokenRepo, err := authtoken.NewRepository(ctx, rw, rw, testKms) + authTokenRepo, err := authtoken.NewRepository(rw, rw, testKms) require.NoError(t, err) at, err := authTokenRepo.CreateAuthToken(ctx, user, acct.GetPublicId()) require.NoError(t, err) @@ -1074,7 +1074,7 @@ func TestRepository_CancelSession(t *testing.T) { name: "bad-session-id", session: setupFn(), overrideSessionId: func() *string { - id, err := newId(ctx) + id, err := newId() require.NoError(t, err) return &id }(), @@ -1375,7 +1375,7 @@ func TestRepository_ActivateSession(t *testing.T) { name: "bad-session-id", session: TestDefaultSession(t, conn, wrapper, iamRepo), overrideSessionId: func() *string { - id, err := newId(ctx) + id, err := newId() require.NoError(t, err) return &id }(), @@ -1507,7 +1507,7 @@ func TestRepository_DeleteSession(t *testing.T) { args: args{ session: func() *Session { s := TestDefaultSession(t, conn, wrapper, iamRepo) - id, err := newId(ctx) + id, err := newId() require.NoError(t, err) s.PublicId = id return s diff --git a/internal/session/repository_test.go b/internal/session/repository_test.go index 32f0c4af93b..cb37cbd7641 100644 --- a/internal/session/repository_test.go +++ b/internal/session/repository_test.go @@ -125,7 +125,7 @@ func TestRepository_convertToSessions(t *testing.T) { repo, err := NewRepository(ctx, rw, rw, kmsCache) require.NoError(t, err) composedOf := TestSessionParams(t, conn, rootWrapper, iamRepo) - sess, err := New(ctx, composedOf) + sess, err := New(composedOf) require.NoError(t, err) sessionWrapper, err := kmsCache.GetWrapper(ctx, sess.ProjectId, kms.KeyPurposeSessions) require.NoError(t, err) diff --git a/internal/session/service_authorize_connection_test.go b/internal/session/service_authorize_connection_test.go index ef8bf4a7548..ee5698e301c 100644 --- a/internal/session/service_authorize_connection_test.go +++ b/internal/session/service_authorize_connection_test.go @@ -102,7 +102,7 @@ func TestService_AuthorizeConnection(t *testing.T) { acct := password.TestAccount(t, conn, authMethod.GetPublicId(), "name1") user := iam.TestUser(t, iamRepo, org.PublicId, iam.WithAccountIds(acct.PublicId)) - authTokenRepo, err := authtoken.NewRepository(ctx, rw, rw, testKms) + authTokenRepo, err := authtoken.NewRepository(rw, rw, testKms) require.NoError(t, err) at, err := authTokenRepo.CreateAuthToken(ctx, user, acct.GetPublicId()) require.NoError(t, err) diff --git a/internal/session/service_close_connections_test.go b/internal/session/service_close_connections_test.go index 2e05f8c769c..59bcb23387a 100644 --- a/internal/session/service_close_connections_test.go +++ b/internal/session/service_close_connections_test.go @@ -63,7 +63,7 @@ func TestServiceCloseConnections(t *testing.T) { acct := 
password.TestAccount(t, conn, authMethod.GetPublicId(), "name1") user := iam.TestUser(t, iamRepo, org.PublicId, iam.WithAccountIds(acct.PublicId)) - authTokenRepo, err := authtoken.NewRepository(ctx, rw, rw, testKms) + authTokenRepo, err := authtoken.NewRepository(rw, rw, testKms) require.NoError(t, err) at, err := authTokenRepo.CreateAuthToken(ctx, user, acct.GetPublicId()) require.NoError(t, err) diff --git a/internal/session/service_worker_status_report_test.go b/internal/session/service_worker_status_report_test.go index 48ee2d84f04..638e5c1b6ea 100644 --- a/internal/session/service_worker_status_report_test.go +++ b/internal/session/service_worker_status_report_test.go @@ -29,7 +29,7 @@ func TestWorkerStatusReport(t *testing.T) { kms := kms.TestKms(t, conn, wrapper) org, prj := iam.TestScopes(t, iam.TestRepo(t, conn, wrapper)) - serverRepo, _ := server.NewRepository(ctx, rw, rw, kms) + serverRepo, _ := server.NewRepository(rw, rw, kms) _, err := serverRepo.UpsertController(ctx, &store.Controller{ PrivateId: "test_controller1", Address: "127.0.0.1", diff --git a/internal/session/session.go b/internal/session/session.go index b3cb46c2a30..9ecc05e6325 100644 --- a/internal/session/session.go +++ b/internal/session/session.go @@ -159,7 +159,7 @@ var ( ) // New creates a new in memory session. -func New(ctx context.Context, c ComposedOf, _ ...Option) (*Session, error) { +func New(c ComposedOf, _ ...Option) (*Session, error) { const op = "session.New" s := Session{ UserId: c.UserId, @@ -178,8 +178,8 @@ func New(ctx context.Context, c ComposedOf, _ ...Option) (*Session, error) { StaticCredentials: c.StaticCredentials, ProtocolWorkerId: c.ProtocolWorkerId, } - if err := s.validateNewSession(ctx); err != nil { - return nil, errors.Wrap(ctx, err, op) + if err := s.validateNewSession(); err != nil { + return nil, errors.WrapDeprecated(err, op) } return &s, nil } @@ -287,7 +287,7 @@ func (s *Session) VetForWrite(ctx context.Context, _ db.Reader, opType db.OpType } switch opType { case db.CreateOp: - if err := s.validateNewSession(ctx); err != nil { + if err := s.validateNewSession(); err != nil { return errors.Wrap(ctx, err, op) } if len(s.Certificate) == 0 { @@ -332,7 +332,7 @@ func (s *Session) VetForWrite(ctx context.Context, _ db.Reader, opType db.OpType case contains(opts.WithFieldMaskPaths, "ProtocolWorkerId"): return errors.New(ctx, errors.InvalidParameter, op, "protocol worker id is immutable") case contains(opts.WithFieldMaskPaths, "TerminationReason"): - if _, err := convertToReason(ctx, s.TerminationReason); err != nil { + if _, err := convertToReason(s.TerminationReason); err != nil { return errors.Wrap(ctx, err, op) } } @@ -356,34 +356,34 @@ func (s *Session) SetTableName(n string) { } // validateNewSession checks everything but the session's PublicId -func (s *Session) validateNewSession(ctx context.Context) error { +func (s *Session) validateNewSession() error { const op = "session.(Session).validateNewSession" if s.UserId == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing user id") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing user id") } if s.TargetId == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing target id") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing target id") } if s.AuthTokenId == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing auth token id") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing auth token id") } if s.ProjectId == "" { - return 
errors.New(ctx, errors.InvalidParameter, op, "missing project id") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing project id") } if s.Endpoint == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing endpoint") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing endpoint") } if s.ExpirationTime.GetTimestamp().AsTime().IsZero() { - return errors.New(ctx, errors.InvalidParameter, op, "missing expiration time") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing expiration time") } if s.TerminationReason != "" { - return errors.New(ctx, errors.InvalidParameter, op, "termination reason must be empty") + return errors.NewDeprecated(errors.InvalidParameter, op, "termination reason must be empty") } if s.TofuToken != nil { - return errors.New(ctx, errors.InvalidParameter, op, "tofu token must be empty") + return errors.NewDeprecated(errors.InvalidParameter, op, "tofu token must be empty") } if s.CtTofuToken != nil { - return errors.New(ctx, errors.InvalidParameter, op, "ct must be empty") + return errors.NewDeprecated(errors.InvalidParameter, op, "ct must be empty") } // It is okay for the worker filter and protocol worker ID to be empty, so // they are not checked here. diff --git a/internal/session/session_connect_with.go b/internal/session/session_connect_with.go index d6908ecd267..c76239dbaa3 100644 --- a/internal/session/session_connect_with.go +++ b/internal/session/session_connect_with.go @@ -4,8 +4,6 @@ package session import ( - "context" - "github.com/hashicorp/boundary/internal/errors" ) @@ -20,25 +18,25 @@ type ConnectWith struct { UserClientIp string } -func (c ConnectWith) validate(ctx context.Context) error { +func (c ConnectWith) validate() error { const op = "session.(ConnectWith).validate" if c.ConnectionId == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing session id") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing session id") } if c.ClientTcpAddress == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing client tcp address") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing client tcp address") } if c.ClientTcpPort == 0 { - return errors.New(ctx, errors.InvalidParameter, op, "missing client ctp port") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing client ctp port") } if c.EndpointTcpAddress == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing endpoint tcp address") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing endpoint tcp address") } if c.EndpointTcpPort == 0 { - return errors.New(ctx, errors.InvalidParameter, op, "missing endpoint ctp port") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing endpoint ctp port") } if c.UserClientIp == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing user client ip") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing user client ip") } return nil } diff --git a/internal/session/session_connect_with_test.go b/internal/session/session_connect_with_test.go index 38aa154aa32..b1db9bd43cf 100644 --- a/internal/session/session_connect_with_test.go +++ b/internal/session/session_connect_with_test.go @@ -4,15 +4,13 @@ package session import ( - "context" "testing" "github.com/stretchr/testify/require" ) func TestConnectWith_validate(t *testing.T) { - ctx := context.Background() - id, err := newId(ctx) + id, err := newId() require.NoError(t, err) type fields struct { @@ -111,7 +109,7 @@ func 
TestConnectWith_validate(t *testing.T) { EndpointTcpPort: tt.fields.EndpointTcpPort, UserClientIp: tt.fields.UserClientIp, } - if err := c.validate(ctx); (err != nil) != tt.wantErr { + if err := c.validate(); (err != nil) != tt.wantErr { t.Errorf("ConnectWith.validate() error = %v, wantErr %v", err, tt.wantErr) } }) diff --git a/internal/session/session_host_set_host.go b/internal/session/session_host_set_host.go index cdba69f936f..2f692907fe3 100644 --- a/internal/session/session_host_set_host.go +++ b/internal/session/session_host_set_host.go @@ -3,11 +3,7 @@ package session -import ( - "context" - - "github.com/hashicorp/boundary/internal/errors" -) +import "github.com/hashicorp/boundary/internal/errors" const ( defaultSessionHostSetHostTableName = "session_host_set_host" @@ -26,16 +22,16 @@ type SessionHostSetHost struct { } // NewSessionHostSetHost creates a new in memory session to host set & host association. -func NewSessionHostSetHost(ctx context.Context, sessionId, hostSetId, hostId string) (*SessionHostSetHost, error) { +func NewSessionHostSetHost(sessionId, hostSetId, hostId string) (*SessionHostSetHost, error) { const op = "session.NewSessionHostSetHost" if sessionId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing session id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing session id") } if hostSetId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing host set id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing host set id") } if hostId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing host id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing host id") } shs := &SessionHostSetHost{ SessionId: sessionId, diff --git a/internal/session/session_target_address.go b/internal/session/session_target_address.go index 2a19ba1d6b5..028e58c5268 100644 --- a/internal/session/session_target_address.go +++ b/internal/session/session_target_address.go @@ -3,11 +3,7 @@ package session -import ( - "context" - - "github.com/hashicorp/boundary/internal/errors" -) +import "github.com/hashicorp/boundary/internal/errors" const ( defaultSessionTargetAddressTableName = "session_target_address" @@ -24,13 +20,13 @@ type SessionTargetAddress struct { } // NewSessionTargetAddress creates a new in memory session target address. 
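// A minimal, self-contained sketch of the constructor-validation shape touched by the
// hunks above (NewSessionHostSetHost, ConnectWith.validate): required string fields are
// checked one by one and an InvalidParameter-style error is returned for the first one
// missing. The Pair type and both constructors are hypothetical stand-ins, and fmt.Errorf
// replaces the internal errors package (errors.New(ctx, ...) / errors.NewDeprecated(...)).
package main

import (
	"context"
	"fmt"
)

type Pair struct {
	SessionId string
	HostId    string
}

// newPairDeprecated mirrors the context-free form: missing fields are reported
// without any request-scoped context attached to the error.
func newPairDeprecated(sessionId, hostId string) (*Pair, error) {
	const op = "example.newPairDeprecated"
	if sessionId == "" {
		return nil, fmt.Errorf("%s: missing session id", op)
	}
	if hostId == "" {
		return nil, fmt.Errorf("%s: missing host id", op)
	}
	return &Pair{SessionId: sessionId, HostId: hostId}, nil
}

// newPair mirrors the context-threaded form: ctx is accepted first so the error
// constructor can carry request-scoped information (eventing, error metadata).
func newPair(ctx context.Context, sessionId, hostId string) (*Pair, error) {
	const op = "example.newPair"
	_ = ctx // the real code passes ctx into errors.New / errors.Wrap
	if sessionId == "" {
		return nil, fmt.Errorf("%s: missing session id", op)
	}
	if hostId == "" {
		return nil, fmt.Errorf("%s: missing host id", op)
	}
	return &Pair{SessionId: sessionId, HostId: hostId}, nil
}

func main() {
	if _, err := newPairDeprecated("", "hst_1234567890"); err != nil {
		fmt.Println(err) // example.newPairDeprecated: missing session id
	}
	if p, err := newPair(context.Background(), "s_1234567890", "hst_1234567890"); err == nil {
		fmt.Printf("%+v\n", *p)
	}
}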
-func NewSessionTargetAddress(ctx context.Context, sessionId, targetId string) (*SessionTargetAddress, error) { +func NewSessionTargetAddress(sessionId, targetId string) (*SessionTargetAddress, error) { const op = "sesssion.NewSessionTargetAddress" if sessionId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing session id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing session id") } if targetId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing target id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing target id") } sta := &SessionTargetAddress{ SessionId: sessionId, diff --git a/internal/session/session_test.go b/internal/session/session_test.go index 17df20858fd..b1ac967534d 100644 --- a/internal/session/session_test.go +++ b/internal/session/session_test.go @@ -193,7 +193,7 @@ func TestSession_Create(t *testing.T) { t.Run(tt.name, func(t *testing.T) { ctx := context.Background() assert, require := assert.New(t), require.New(t) - got, err := New(ctx, tt.args.composedOf) + got, err := New(tt.args.composedOf) if tt.wantErr { require.Error(err) assert.True(errors.Match(errors.T(tt.wantIsErr), err)) @@ -202,7 +202,7 @@ func TestSession_Create(t *testing.T) { require.NoError(err) assert.Equal(tt.want, got) if tt.create { - id, err := db.NewPublicId(ctx, globals.SessionPrefix) + id, err := db.NewPublicId(globals.SessionPrefix) require.NoError(err) got.PublicId = id privKey, certBytes, err := newCert(ctx, id, tt.args.addresses, composedOf.ExpirationTime.Timestamp.AsTime(), rand.Reader) @@ -235,7 +235,6 @@ func TestSession_Create(t *testing.T) { func TestSession_Delete(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -258,7 +257,7 @@ func TestSession_Delete(t *testing.T) { name: "bad-id", session: func() *Session { s := AllocSession() - id, err := db.NewPublicId(ctx, globals.SessionPrefix) + id, err := db.NewPublicId(globals.SessionPrefix) require.NoError(t, err) s.PublicId = id return &s @@ -272,7 +271,7 @@ func TestSession_Delete(t *testing.T) { assert, require := assert.New(t), require.New(t) deleteSession := AllocSession() deleteSession.PublicId = tt.session.PublicId - deletedRows, err := rw.Delete(ctx, &deleteSession) + deletedRows, err := rw.Delete(context.Background(), &deleteSession) if tt.wantErr { require.Error(err) return diff --git a/internal/session/state.go b/internal/session/state.go index 634d8d9a96f..43226dcd15b 100644 --- a/internal/session/state.go +++ b/internal/session/state.go @@ -71,15 +71,15 @@ var ( // NewState creates a new in memory session state. No options // are currently supported. -func NewState(ctx context.Context, session_id string, state Status, _ ...Option) (*State, error) { +func NewState(session_id string, state Status, _ ...Option) (*State, error) { const op = "session.NewState" s := State{ SessionId: session_id, Status: state, } - if err := s.validate(ctx); err != nil { - return nil, errors.Wrap(ctx, err, op) + if err := s.validate(); err != nil { + return nil, errors.WrapDeprecated(err, op) } return &s, nil } @@ -127,7 +127,7 @@ func (s *State) Clone() any { // before it's written. 
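// A minimal sketch of the table-driven constructor tests exercised in this patch
// (TestSession_Create, TestState_Create): each case calls the constructor and branches on
// wantErr. The state type and newState are hypothetical stand-ins for session.NewState,
// and plain *testing.T assertions replace testify's require/assert.
package example

import (
	"errors"
	"testing"
)

type state struct {
	SessionId string
	Status    string
}

var errInvalidParameter = errors.New("invalid parameter")

func newState(sessionId, status string) (*state, error) {
	if sessionId == "" || status == "" {
		return nil, errInvalidParameter
	}
	return &state{SessionId: sessionId, Status: status}, nil
}

func TestNewState(t *testing.T) {
	tests := []struct {
		name      string
		sessionId string
		status    string
		wantErr   bool
	}{
		{name: "valid", sessionId: "s_1234567890", status: "pending"},
		{name: "missing-session-id", status: "pending", wantErr: true},
		{name: "missing-status", sessionId: "s_1234567890", wantErr: true},
	}
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			got, err := newState(tt.sessionId, tt.status)
			if (err != nil) != tt.wantErr {
				t.Fatalf("newState() error = %v, wantErr %v", err, tt.wantErr)
			}
			if !tt.wantErr && got.SessionId != tt.sessionId {
				t.Errorf("got session id %q, want %q", got.SessionId, tt.sessionId)
			}
		})
	}
}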
func (s *State) VetForWrite(ctx context.Context, _ db.Reader, _ db.OpType, _ ...db.Option) error { const op = "session.(State).VetForWrite" - if err := s.validate(ctx); err != nil { + if err := s.validate(); err != nil { return errors.Wrap(ctx, err, op) } return nil @@ -149,22 +149,22 @@ func (s *State) SetTableName(n string) { } // validate checks the session state -func (s *State) validate(ctx context.Context) error { +func (s *State) validate() error { const op = "session.(State).validate" if s.Status == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing status") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing status") } if s.SessionId == "" { - return errors.New(ctx, errors.InvalidParameter, op, "missing session id") + return errors.NewDeprecated(errors.InvalidParameter, op, "missing session id") } if s.StartTime != nil { - return errors.New(ctx, errors.InvalidParameter, op, "start time is not settable") + return errors.NewDeprecated(errors.InvalidParameter, op, "start time is not settable") } if s.EndTime != nil { - return errors.New(ctx, errors.InvalidParameter, op, "end time is not settable") + return errors.NewDeprecated(errors.InvalidParameter, op, "end time is not settable") } if s.PreviousEndTime != nil { - return errors.New(ctx, errors.InvalidParameter, op, "previous end time is not settable") + return errors.NewDeprecated(errors.InvalidParameter, op, "previous end time is not settable") } return nil } diff --git a/internal/session/state_test.go b/internal/session/state_test.go index c081958c2ee..f8fd6366b0a 100644 --- a/internal/session/state_test.go +++ b/internal/session/state_test.go @@ -67,7 +67,7 @@ func TestState_Create(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := NewState(context.Background(), tt.args.sessionId, tt.args.status) + got, err := NewState(tt.args.sessionId, tt.args.status) if tt.wantErr { require.Error(err) assert.True(errors.Match(errors.T(tt.wantIsErr), err)) @@ -90,7 +90,6 @@ func TestState_Create(t *testing.T) { func TestState_Delete(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") rw := db.New(conn) wrapper := db.TestWrapper(t) @@ -116,7 +115,7 @@ func TestState_Delete(t *testing.T) { name: "bad-id", state: TestState(t, conn, session2.PublicId, StatusTerminated), deleteStateId: func() string { - id, err := db.NewPublicId(ctx, StatePrefix) + id, err := db.NewPublicId(StatePrefix) require.NoError(t, err) return id }(), @@ -129,7 +128,7 @@ func TestState_Delete(t *testing.T) { assert, require := assert.New(t), require.New(t) var initialState State - err := rw.LookupWhere(ctx, &initialState, "session_id = ? and state = ?", []any{tt.state.SessionId, tt.state.Status}) + err := rw.LookupWhere(context.Background(), &initialState, "session_id = ? and state = ?", []any{tt.state.SessionId, tt.state.Status}) require.NoError(err) deleteState := allocState() @@ -139,7 +138,7 @@ func TestState_Delete(t *testing.T) { deleteState.SessionId = tt.state.SessionId } deleteState.StartTime = initialState.StartTime - deletedRows, err := rw.Delete(ctx, &deleteState) + deletedRows, err := rw.Delete(context.Background(), &deleteState) if tt.wantErr { require.Error(err) return @@ -151,7 +150,7 @@ func TestState_Delete(t *testing.T) { } assert.Equal(tt.wantRowsDeleted, deletedRows) foundState := allocState() - err = rw.LookupWhere(ctx, &foundState, "session_id = ? 
and start_time = ?", []any{tt.state.SessionId, initialState.StartTime}) + err = rw.LookupWhere(context.Background(), &foundState, "session_id = ? and start_time = ?", []any{tt.state.SessionId, initialState.StartTime}) require.Error(err) assert.True(errors.IsNotFoundError(err)) }) diff --git a/internal/session/term_reason.go b/internal/session/term_reason.go index 7fac1733ebf..b9ed6843047 100644 --- a/internal/session/term_reason.go +++ b/internal/session/term_reason.go @@ -4,7 +4,6 @@ package session import ( - "context" "fmt" "github.com/hashicorp/boundary/internal/errors" @@ -29,7 +28,7 @@ func (r TerminationReason) String() string { return string(r) } -func convertToReason(ctx context.Context, s string) (TerminationReason, error) { +func convertToReason(s string) (TerminationReason, error) { const op = "session.convertToReason" switch s { case UnknownTermination.String(): @@ -47,6 +46,6 @@ func convertToReason(ctx context.Context, s string) (TerminationReason, error) { case ConnectionLimit.String(): return ConnectionLimit, nil default: - return "", errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("%s is not a valid reason", s)) + return "", errors.NewDeprecated(errors.InvalidParameter, op, fmt.Sprintf("%s is not a valid reason", s)) } } diff --git a/internal/session/testing.go b/internal/session/testing.go index d275176f8c5..56fdca087e6 100644 --- a/internal/session/testing.go +++ b/internal/session/testing.go @@ -29,20 +29,19 @@ import ( // TestConnection creates a test connection for the sessionId in the repository. func TestConnection(t testing.TB, conn *db.DB, sessionId, clientTcpAddr string, clientTcpPort uint32, endpointTcpAddr string, endpointTcpPort uint32, userClientIp string) *Connection { t.Helper() - ctx := context.Background() require := require.New(t) rw := db.New(conn) - c, err := NewConnection(ctx, sessionId, clientTcpAddr, clientTcpPort, endpointTcpAddr, endpointTcpPort, userClientIp) + c, err := NewConnection(sessionId, clientTcpAddr, clientTcpPort, endpointTcpAddr, endpointTcpPort, userClientIp) require.NoError(err) - id, err := newConnectionId(ctx) + id, err := newConnectionId() require.NoError(err) c.PublicId = id - err = rw.Create(ctx, c) + err = rw.Create(context.Background(), c) require.NoError(err) - connectedState, err := NewConnectionState(ctx, c.PublicId, StatusConnected) + connectedState, err := NewConnectionState(c.PublicId, StatusConnected) require.NoError(err) - err = rw.Create(ctx, connectedState) + err = rw.Create(context.Background(), connectedState) require.NoError(err) return c } @@ -50,10 +49,9 @@ func TestConnection(t testing.TB, conn *db.DB, sessionId, clientTcpAddr string, // TestConnectionState creates a test connection state for the connectionId in the repository. 
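// A minimal sketch of the string-to-typed-constant conversion used by convertToReason in
// the term_reason.go hunk above: known values map to a TerminationReason constant and any
// other input is rejected. The constant set here is a reduced, hypothetical subset, and
// fmt.Errorf stands in for the internal errors package.
package main

import "fmt"

type TerminationReason string

const (
	UnknownTermination TerminationReason = "unknown"
	TimedOut           TerminationReason = "timed out"
	ClosedByUser       TerminationReason = "closed by end-user"
)

func convertToReason(s string) (TerminationReason, error) {
	const op = "example.convertToReason"
	switch s {
	case string(UnknownTermination):
		return UnknownTermination, nil
	case string(TimedOut):
		return TimedOut, nil
	case string(ClosedByUser):
		return ClosedByUser, nil
	default:
		return "", fmt.Errorf("%s: %q is not a valid reason", op, s)
	}
}

func main() {
	r, err := convertToReason("timed out")
	fmt.Println(r, err) // timed out <nil>
	_, err = convertToReason("bogus")
	fmt.Println(err) // example.convertToReason: "bogus" is not a valid reason
}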
func TestConnectionState(t testing.TB, conn *db.DB, connectionId string, state ConnectionStatus) *ConnectionState { t.Helper() - ctx := context.Background() require := require.New(t) rw := db.New(conn) - s, err := NewConnectionState(ctx, connectionId, state) + s, err := NewConnectionState(connectionId, state) require.NoError(err) err = rw.Create(context.Background(), s) require.NoError(err) @@ -65,7 +63,7 @@ func TestState(t testing.TB, conn *db.DB, sessionId string, state Status) *State t.Helper() require := require.New(t) rw := db.New(conn) - s, err := NewState(context.Background(), sessionId, state) + s, err := NewState(sessionId, state) require.NoError(err) err = rw.Create(context.Background(), s) require.NoError(err) @@ -77,7 +75,7 @@ func TestSessionHostSetHost(t testing.TB, conn *db.DB, sessionId, hostSetId, hos t.Helper() require := require.New(t) rw := db.New(conn) - hs, err := NewSessionHostSetHost(context.Background(), sessionId, hostSetId, hostId) + hs, err := NewSessionHostSetHost(sessionId, hostSetId, hostId) require.NoError(err) err = rw.Create(context.Background(), hs) require.NoError(err) @@ -88,7 +86,7 @@ func TestSessionTargetAddress(t testing.TB, conn *db.DB, sessionId, targetId str t.Helper() require := require.New(t) rw := db.New(conn) - ta, err := NewSessionTargetAddress(context.Background(), sessionId, targetId) + ta, err := NewSessionTargetAddress(sessionId, targetId) require.NoError(err) err = rw.Create(context.Background(), ta) require.NoError(err) @@ -106,9 +104,9 @@ func TestSession(t testing.TB, conn *db.DB, rootWrapper wrapping.Wrapper, c Comp c.ExpirationTime = ×tamp.Timestamp{Timestamp: future} } rw := db.New(conn) - s, err := New(ctx, c, opt...) + s, err := New(c, opt...) require.NoError(err) - id, err := newId(ctx) + id, err := newId() require.NoError(err) s.PublicId = id kmsCache := kms.TestKms(t, conn, rootWrapper) @@ -184,7 +182,7 @@ func TestSessionTargetAddressParams(t testing.TB, conn *db.DB, wrapper wrapping. 
acct := password.TestAccount(t, conn, authMethod.GetPublicId(), "name1") user := iam.TestUser(t, iamRepo, org.PublicId, iam.WithAccountIds(acct.PublicId)) - authTokenRepo, err := authtoken.NewRepository(ctx, rw, rw, kms) + authTokenRepo, err := authtoken.NewRepository(rw, rw, kms) require.NoError(err) at, err := authTokenRepo.CreateAuthToken(ctx, user, acct.GetPublicId()) require.NoError(err) @@ -238,7 +236,7 @@ func TestSessionParams(t testing.TB, conn *db.DB, wrapper wrapping.Wrapper, iamR acct := password.TestAccount(t, conn, authMethod.GetPublicId(), "name1") user := iam.TestUser(t, iamRepo, org.PublicId, iam.WithAccountIds(acct.PublicId)) - authTokenRepo, err := authtoken.NewRepository(ctx, rw, rw, kms) + authTokenRepo, err := authtoken.NewRepository(rw, rw, kms) require.NoError(err) at, err := authTokenRepo.CreateAuthToken(ctx, user, acct.GetPublicId()) require.NoError(err) diff --git a/internal/session/testing_test.go b/internal/session/testing_test.go index 1098b7a72d2..20c24c66fd5 100644 --- a/internal/session/testing_test.go +++ b/internal/session/testing_test.go @@ -83,7 +83,7 @@ func Test_TestWorker(t *testing.T) { func Test_TestCert(t *testing.T) { assert, require := assert.New(t), require.New(t) - sessionId, err := newId(context.Background()) + sessionId, err := newId() require.NoError(err) key, cert, err := TestCert(sessionId) require.NoError(err) diff --git a/internal/session/util.go b/internal/session/util.go index e9012339c3c..25ae79f794a 100644 --- a/internal/session/util.go +++ b/internal/session/util.go @@ -24,12 +24,12 @@ func DeriveED25519Key(ctx context.Context, wrapper wrapping.Wrapper, userId, job jId = []byte(jobId) } if wrapper == nil { - return nil, nil, errors.New(ctx, errors.InvalidParameter, op, "missing wrapper") + return nil, nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing wrapper") } reader, err := crypto.NewDerivedReader(ctx, wrapper, 32, uId, jId) if err != nil { - return nil, nil, errors.Wrap(ctx, err, op) + return nil, nil, errors.WrapDeprecated(err, op) } return ed25519.GenerateKey(reader) } diff --git a/internal/storage/plugin/ids.go b/internal/storage/plugin/ids.go index cca5b28fb2f..74f4d6ee453 100644 --- a/internal/storage/plugin/ids.go +++ b/internal/storage/plugin/ids.go @@ -26,7 +26,7 @@ const ( func newStorageBucketId(ctx context.Context) (string, error) { const op = "plugin.newStorageBucketId" - id, err := db.NewPublicId(ctx, globals.PluginStorageBucketPrefix) + id, err := db.NewPublicId(globals.PluginStorageBucketPrefix) if err != nil { return "", errors.Wrap(ctx, err, op) } diff --git a/internal/target/address.go b/internal/target/address.go index 8600ca7bf02..a4d004eef1b 100644 --- a/internal/target/address.go +++ b/internal/target/address.go @@ -33,13 +33,13 @@ var ( // NewAddress creates a new in memory address. No options are // currently supported. 
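// A minimal sketch of the deterministic key-derivation pattern in the DeriveED25519Key hunk
// above: the real code builds a derived reader from a KMS wrapper plus the user and job IDs
// and feeds it to ed25519.GenerateKey. The derived reader here is a hypothetical stand-in
// built from a SHA-256 of the inputs, so the same inputs always yield the same key pair.
package main

import (
	"bytes"
	"crypto/ed25519"
	"crypto/sha256"
	"fmt"
)

func deriveED25519Key(secret []byte, userId, jobId string) (ed25519.PublicKey, ed25519.PrivateKey, error) {
	// Stand-in for crypto.NewDerivedReader(ctx, wrapper, 32, userId, jobId):
	// hash the secret and both IDs into a fixed 32-byte seed.
	seed := sha256.Sum256(append(append(append([]byte{}, secret...), userId...), jobId...))
	return ed25519.GenerateKey(bytes.NewReader(seed[:]))
}

func main() {
	pub1, _, err := deriveED25519Key([]byte("root-key-material"), "u_1234567890", "job_1")
	if err != nil {
		panic(err)
	}
	pub2, _, _ := deriveED25519Key([]byte("root-key-material"), "u_1234567890", "job_1")
	fmt.Println(bytes.Equal(pub1, pub2)) // true: same inputs, same key
}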
-func NewAddress(ctx context.Context, targetId, address string, _ ...Option) (*Address, error) { +func NewAddress(targetId, address string, _ ...Option) (*Address, error) { const op = "target.NewAddress" if targetId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing target id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing target id") } if address == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing address") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing address") } address = strings.TrimSpace(address) t := &Address{ diff --git a/internal/target/address_test.go b/internal/target/address_test.go index aaeb5ca39ba..352d0d8ae01 100644 --- a/internal/target/address_test.go +++ b/internal/target/address_test.go @@ -4,7 +4,6 @@ package target_test import ( - "context" "testing" "github.com/hashicorp/boundary/internal/errors" @@ -57,7 +56,7 @@ func TestAddress_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := target.NewAddress(context.Background(), tt.args.targetId, tt.args.address) + got, err := target.NewAddress(tt.args.targetId, tt.args.address) if tt.wantErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantErr), err), "want err: %q got: %q", tt.wantErr, err) assert.Nil(got) diff --git a/internal/target/credential.go b/internal/target/credential.go index d59d6c78f17..0b257b7495a 100644 --- a/internal/target/credential.go +++ b/internal/target/credential.go @@ -4,7 +4,6 @@ package target import ( - "context" "fmt" "github.com/hashicorp/boundary/internal/credential" @@ -23,13 +22,13 @@ type StaticCredential struct { // NewStaticCredential creates a new in memory StaticCredential // representing the relationship between targetId and credentialId. -func NewStaticCredential(ctx context.Context, targetId, credentialId string, purpose credential.Purpose) (*StaticCredential, error) { +func NewStaticCredential(targetId, credentialId string, purpose credential.Purpose) (*StaticCredential, error) { const op = "target.StaticCredential" if targetId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no target id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no target id") } if credentialId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no credential id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no credential id") } t := &StaticCredential{ diff --git a/internal/target/credential_library.go b/internal/target/credential_library.go index f33a2e2fea5..da300b74846 100644 --- a/internal/target/credential_library.go +++ b/internal/target/credential_library.go @@ -4,7 +4,6 @@ package target import ( - "context" "fmt" "github.com/hashicorp/boundary/internal/credential" @@ -23,13 +22,13 @@ type CredentialLibrary struct { // NewCredentialLibrary creates a new in memory CredentialLibrary // representing the relationship between targetId and credentialLibraryId. 
-func NewCredentialLibrary(ctx context.Context, targetId, credentialLibraryId string, purpose credential.Purpose) (*CredentialLibrary, error) { +func NewCredentialLibrary(targetId, credentialLibraryId string, purpose credential.Purpose) (*CredentialLibrary, error) { const op = "target.NewCredentialLibrary" if targetId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no target id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no target id") } if credentialLibraryId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "no credential library id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "no credential library id") } t := &CredentialLibrary{ diff --git a/internal/target/credential_library_test.go b/internal/target/credential_library_test.go index fca2cfbe3fe..61e848f35bc 100644 --- a/internal/target/credential_library_test.go +++ b/internal/target/credential_library_test.go @@ -4,7 +4,6 @@ package target_test import ( - "context" "testing" "github.com/hashicorp/boundary/internal/credential" @@ -59,7 +58,7 @@ func TestCredentialLibrary_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := target.NewCredentialLibrary(context.Background(), tt.args.targetId, tt.args.libraryId, credential.BrokeredPurpose) + got, err := target.NewCredentialLibrary(tt.args.targetId, tt.args.libraryId, credential.BrokeredPurpose) if tt.wantErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantErr), err), "want err: %q got: %q", tt.wantErr, err) assert.Nil(got) diff --git a/internal/target/credential_test.go b/internal/target/credential_test.go index 1b60b070096..a2c2de98277 100644 --- a/internal/target/credential_test.go +++ b/internal/target/credential_test.go @@ -4,7 +4,6 @@ package target_test import ( - "context" "testing" "github.com/hashicorp/boundary/internal/credential" @@ -59,7 +58,7 @@ func TestStaticCredential_New(t *testing.T) { tt := tt t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - got, err := target.NewStaticCredential(context.Background(), tt.args.targetId, tt.args.credId, credential.BrokeredPurpose) + got, err := target.NewStaticCredential(tt.args.targetId, tt.args.credId, credential.BrokeredPurpose) if tt.wantErr != 0 { assert.Truef(errors.Match(errors.T(tt.wantErr), err), "want err: %q got: %q", tt.wantErr, err) assert.Nil(got) diff --git a/internal/target/host_set.go b/internal/target/host_set.go index bd70e90128d..8ab28634e61 100644 --- a/internal/target/host_set.go +++ b/internal/target/host_set.go @@ -30,13 +30,13 @@ var _ db.VetForWriter = (*TargetHostSet)(nil) // NewTargetHostSet creates a new in memory target host set. No options are // currently supported. 
-func NewTargetHostSet(ctx context.Context, targetId, hostSetId string, _ ...Option) (*TargetHostSet, error) { +func NewTargetHostSet(targetId, hostSetId string, _ ...Option) (*TargetHostSet, error) { const op = "target.NewTargetHostSet" if targetId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing target id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing target id") } if hostSetId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing hostSetId id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing hostSetId id") } t := &TargetHostSet{ TargetHostSet: &store.TargetHostSet{ diff --git a/internal/target/registry.go b/internal/target/registry.go index 690b7e19794..69aca4f5ee7 100644 --- a/internal/target/registry.go +++ b/internal/target/registry.go @@ -15,7 +15,7 @@ import ( const domain = "target" // NewFunc is a function that creates a Target with the provided project and options. -type NewFunc func(ctx context.Context, projectId string, opt ...Option) (Target, error) +type NewFunc func(projectId string, opt ...Option) (Target, error) // AllocFunc is a function that creates an in-memory Target. type AllocFunc func() Target @@ -36,7 +36,7 @@ type VetCredentialSourcesFunc func(context.Context, []*CredentialLibrary, []*Sta // managing target suptypes. type targetHooks interface { // NewTarget creates a new in memory target. - NewTarget(ctx context.Context, projectId string, opt ...Option) (Target, error) + NewTarget(projectId string, opt ...Option) (Target, error) // AllocTarget will allocate an empty target. AllocTarget() Target // Vet validates that the given Target has the proper fields and values @@ -166,7 +166,7 @@ func New(ctx context.Context, subtype subtypes.Subtype, projectId string, opt .. if !ok { return nil, errors.New(ctx, errors.InvalidParameter, op, "unsupported subtype") } - return nf(ctx, projectId, opt...) + return nf(projectId, opt...) } // Register registers repository hooks and the prefixes for a provided Subtype. 
Register diff --git a/internal/target/repository.go b/internal/target/repository.go index ede646d8d30..c7e8dba4d8e 100644 --- a/internal/target/repository.go +++ b/internal/target/repository.go @@ -413,7 +413,7 @@ func (r *Repository) CreateTarget(ctx context.Context, target Target, opt ...Opt if !ok { return nil, errors.New(ctx, errors.InvalidParameter, op, fmt.Sprintf("unsupported target type %s", target.GetType())) } - id, err := db.NewPublicId(ctx, prefix) + id, err := db.NewPublicId(prefix) if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -424,7 +424,7 @@ func (r *Repository) CreateTarget(ctx context.Context, target Target, opt ...Opt var err error if t.GetAddress() != "" { t.SetAddress(strings.TrimSpace(t.GetAddress())) - address, err = NewAddress(ctx, t.GetPublicId(), t.GetAddress()) + address, err = NewAddress(t.GetPublicId(), t.GetAddress()) if err != nil { return nil, errors.Wrap(ctx, err, op) } @@ -617,7 +617,7 @@ func (r *Repository) UpdateTarget(ctx context.Context, target Target, version ui if len(hostSources) > 0 { return errors.New(ctx, errors.Conflict, op, "unable to set address because one or more host sources is assigned to the given target") } - address, err = NewAddress(ctx, t.GetPublicId(), addressEndpoint) + address, err = NewAddress(t.GetPublicId(), addressEndpoint) if err != nil { return errors.Wrap(ctx, err, op) } diff --git a/internal/target/repository_credential_source.go b/internal/target/repository_credential_source.go index a03314fd3ab..56ab0927975 100644 --- a/internal/target/repository_credential_source.go +++ b/internal/target/repository_credential_source.go @@ -494,7 +494,7 @@ func (r *Repository) changes(ctx context.Context, targetId string, ids []string, } switch CredentialSourceType(chg.Type) { case LibraryCredentialSourceType: - lib, err := NewCredentialLibrary(ctx, targetId, chg.SourceId, purpose) + lib, err := NewCredentialLibrary(targetId, chg.SourceId, purpose) if err != nil { return nil, nil, nil, nil, errors.Wrap(ctx, err, op) } @@ -505,7 +505,7 @@ func (r *Repository) changes(ctx context.Context, targetId string, ids []string, addCredLib = append(addCredLib, lib) } case StaticCredentialSourceType: - cred, err := NewStaticCredential(ctx, targetId, chg.SourceId, purpose) + cred, err := NewStaticCredential(targetId, chg.SourceId, purpose) if err != nil { return nil, nil, nil, nil, errors.Wrap(ctx, err, op) } @@ -574,13 +574,13 @@ func (r *Repository) createSources(ctx context.Context, tId string, tSubtype sub for _, id := range ids { switch credTypeById[id] { case LibraryCredentialSourceType: - lib, err := NewCredentialLibrary(ctx, tId, id, purpose) + lib, err := NewCredentialLibrary(tId, id, purpose) if err != nil { return nil, nil, errors.Wrap(ctx, err, op) } credLibs = append(credLibs, lib) case StaticCredentialSourceType: - cred, err := NewStaticCredential(ctx, tId, id, purpose) + cred, err := NewStaticCredential(tId, id, purpose) if err != nil { return nil, nil, errors.Wrap(ctx, err, op) } diff --git a/internal/target/repository_credential_source_test.go b/internal/target/repository_credential_source_test.go index b4c8d233d03..b5246d7699f 100644 --- a/internal/target/repository_credential_source_test.go +++ b/internal/target/repository_credential_source_test.go @@ -24,8 +24,8 @@ import ( type hooks struct{} -func (h hooks) NewTarget(ctx context.Context, projectId string, opt ...target.Option) (target.Target, error) { - return targettest.New(ctx, projectId, opt...) 
+func (h hooks) NewTarget(projectId string, opt ...target.Option) (target.Target, error) { + return targettest.New(projectId, opt...) } func (h hooks) AllocTarget() target.Target { @@ -45,7 +45,6 @@ func (h hooks) VetCredentialSources(ctx context.Context, cls []*target.Credentia } func TestRepository_SetTargetCredentialSources(t *testing.T) { - ctx := context.Background() target.Register(targettest.Subtype, hooks{}, globals.TcpTargetPrefix) t.Parallel() @@ -80,7 +79,7 @@ func TestRepository_SetTargetCredentialSources(t *testing.T) { ids.BrokeredCredentialIds = append(ids.BrokeredCredentialIds, cred.GetPublicId()) } - target, err := repo.AddTargetCredentialSources(ctx, tar.GetPublicId(), 1, ids) + target, err := repo.AddTargetCredentialSources(context.Background(), tar.GetPublicId(), 1, ids) require.NoError(t, err) credentialSources := target.GetCredentialSources() diff --git a/internal/target/repository_host_source.go b/internal/target/repository_host_source.go index 595112185c9..557956af60e 100644 --- a/internal/target/repository_host_source.go +++ b/internal/target/repository_host_source.go @@ -31,7 +31,7 @@ func (r *Repository) AddTargetHostSources(ctx context.Context, targetId string, } newHostSources := make([]any, 0, len(hostSourceIds)) for _, id := range hostSourceIds { - ths, err := NewTargetHostSet(ctx, targetId, id) + ths, err := NewTargetHostSet(targetId, id) if err != nil { return nil, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory target host set")) } @@ -139,7 +139,7 @@ func (r *Repository) DeleteTargetHostSources(ctx context.Context, targetId strin } deleteTargetHostSources := make([]any, 0, len(hostSourceIds)) for _, id := range hostSourceIds { - ths, err := NewTargetHostSet(ctx, targetId, id) + ths, err := NewTargetHostSet(targetId, id) if err != nil { return db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory target host set")) } @@ -255,7 +255,7 @@ func (r *Repository) SetTargetHostSources(ctx context.Context, targetId string, delete(found, id) continue } - hs, err := NewTargetHostSet(ctx, targetId, id) + hs, err := NewTargetHostSet(targetId, id) if err != nil { return nil, nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg("unable to create in memory target host set")) } @@ -264,7 +264,7 @@ func (r *Repository) SetTargetHostSources(ctx context.Context, targetId string, deleteHostSources := make([]any, 0, len(hostSourceIds)) if len(found) > 0 { for _, s := range found { - hs, err := NewTargetHostSet(ctx, targetId, s.Id()) + hs, err := NewTargetHostSet(targetId, s.Id()) if err != nil { return nil, nil, db.NoRowsAffected, errors.Wrap(ctx, err, op, errors.WithMsg(" unable to create in memory target host set")) } diff --git a/internal/target/targettest/target.go b/internal/target/targettest/target.go index 101ee218a00..37d190bc381 100644 --- a/internal/target/targettest/target.go +++ b/internal/target/targettest/target.go @@ -305,11 +305,11 @@ func VetCredentialSources(_ context.Context, _ []*target.CredentialLibrary, _ [] } // New creates a targettest.Target. -func New(ctx context.Context, projectId string, opt ...target.Option) (target.Target, error) { +func New(projectId string, opt ...target.Option) (target.Target, error) { const op = "target_test.New" opts := target.GetOpts(opt...) 
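// A minimal sketch of the subtype-registry dispatch shown in the target/registry.go and
// test-hooks hunks above: each subtype registers a constructor (NewFunc) and New dispatches
// to it by subtype name. Target, NewFunc, and the "tcp" registration here are simplified
// stand-ins for the real interfaces and option handling.
package main

import (
	"fmt"
	"sync"
)

type Target interface {
	GetProjectId() string
}

type tcpTarget struct{ projectId string }

func (t *tcpTarget) GetProjectId() string { return t.projectId }

// NewFunc mirrors the registered-constructor shape from the patch: it receives the
// project ID (options omitted here) and returns a Target.
type NewFunc func(projectId string) (Target, error)

var (
	mu       sync.RWMutex
	registry = map[string]NewFunc{}
)

func Register(subtype string, nf NewFunc) error {
	mu.Lock()
	defer mu.Unlock()
	if _, ok := registry[subtype]; ok {
		return fmt.Errorf("subtype %q already registered", subtype)
	}
	registry[subtype] = nf
	return nil
}

func New(subtype, projectId string) (Target, error) {
	mu.RLock()
	nf, ok := registry[subtype]
	mu.RUnlock()
	if !ok {
		return nil, fmt.Errorf("unsupported subtype %q", subtype)
	}
	return nf(projectId)
}

func main() {
	_ = Register("tcp", func(projectId string) (Target, error) {
		if projectId == "" {
			return nil, fmt.Errorf("missing project id")
		}
		return &tcpTarget{projectId: projectId}, nil
	})
	tar, err := New("tcp", "p_1234567890")
	fmt.Println(tar.GetProjectId(), err) // p_1234567890 <nil>
}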
if projectId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing project id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing project id") } t := &Target{ Target: &store.Target{ @@ -335,9 +335,9 @@ func TestNewTestTarget(ctx context.Context, t *testing.T, conn *db.DB, projectId opts := target.GetOpts(opt...) require := require.New(t) rw := db.New(conn) - tar, err := New(ctx, projectId, opt...) + tar, err := New(projectId, opt...) require.NoError(err) - id, err := db.NewPublicId(ctx, globals.TcpTargetPrefix) + id, err := db.NewPublicId(globals.TcpTargetPrefix) require.NoError(err) tar.SetPublicId(ctx, id) err = rw.Create(context.Background(), tar) @@ -346,7 +346,7 @@ func TestNewTestTarget(ctx context.Context, t *testing.T, conn *db.DB, projectId if len(opts.WithHostSources) > 0 { newHostSets := make([]any, 0, len(opts.WithHostSources)) for _, s := range opts.WithHostSources { - hostSet, err := target.NewTargetHostSet(ctx, tar.GetPublicId(), s) + hostSet, err := target.NewTargetHostSet(tar.GetPublicId(), s) require.NoError(err) newHostSets = append(newHostSets, hostSet) } diff --git a/internal/target/tcp/exports_test.go b/internal/target/tcp/exports_test.go index 096d0f18066..6d70ecadae1 100644 --- a/internal/target/tcp/exports_test.go +++ b/internal/target/tcp/exports_test.go @@ -4,8 +4,6 @@ package tcp import ( - "context" - "github.com/hashicorp/boundary/internal/target" "github.com/hashicorp/boundary/internal/target/store" ) @@ -20,8 +18,8 @@ var ( // NewTestTarget is a test helper that bypasses the projectId checks // performed by NewTarget, allowing tests to create Targets with // nil projectIds for more robust testing. -func NewTestTarget(ctx context.Context, projectId string, opt ...target.Option) target.Target { - t, _ := targetHooks{}.NewTarget(ctx, "testScope", opt...) +func NewTestTarget(projectId string, opt ...target.Option) target.Target { + t, _ := targetHooks{}.NewTarget("testScope", opt...) 
t.SetProjectId(projectId) return t } diff --git a/internal/target/tcp/immutable_fields_test.go b/internal/target/tcp/immutable_fields_test.go index 71f46af91c5..c4dff542d95 100644 --- a/internal/target/tcp/immutable_fields_test.go +++ b/internal/target/tcp/immutable_fields_test.go @@ -182,7 +182,7 @@ func TestTargetHostSet_ImmutableFields(t *testing.T) { gotHostSources := gotTarget.GetHostSources() require.NoError(t, err) require.Equal(t, 1, len(gotHostSources)) - new, err := target.NewTargetHostSet(ctx, projTarget.GetPublicId(), gotHostSources[0].Id()) + new, err := target.NewTargetHostSet(projTarget.GetPublicId(), gotHostSources[0].Id()) require.NoError(t, err) tests := []struct { diff --git a/internal/target/tcp/repository_host_source_test.go b/internal/target/tcp/repository_host_source_test.go index 01d0b164db1..85050fb752c 100644 --- a/internal/target/tcp/repository_host_source_test.go +++ b/internal/target/tcp/repository_host_source_test.go @@ -108,10 +108,10 @@ func TestRepository_AddTargetHostSets(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - ctx := context.Background() db.TestDeleteWhere(t, conn, func() any { i := allocTargetHostSet(); return &i }(), "1 = 1") - db.TestDeleteWhere(t, conn, tcp.NewTestTarget(ctx, ""), "1 = 1") + db.TestDeleteWhere(t, conn, tcp.NewTestTarget(""), "1 = 1") + ctx := context.Background() projTarget := tcp.TestTarget(ctx, t, conn, staticProj.PublicId, "static-proj") var address *target.Address diff --git a/internal/target/tcp/repository_tcp_target_test.go b/internal/target/tcp/repository_tcp_target_test.go index 330abb90d30..0deb8c53a07 100644 --- a/internal/target/tcp/repository_tcp_target_test.go +++ b/internal/target/tcp/repository_tcp_target_test.go @@ -132,7 +132,7 @@ func TestRepository_CreateTarget(t *testing.T) { target.WithDefaultPort(uint32(22)), ) require.NoError(t, err) - id, err := db.NewPublicId(ctx, globals.TcpTargetPrefix) + id, err := db.NewPublicId(globals.TcpTargetPrefix) require.NoError(t, err) tar.SetPublicId(ctx, id) return tar @@ -571,7 +571,6 @@ func TestRepository_UpdateTcpTarget(t *testing.T) { } tar := tcp.TestTarget(ctx, t, conn, tt.newProjectId, name, tt.newTargetOpts...) 
updateTarget := tcp.NewTestTarget( - ctx, tt.args.ProjectId, target.WithName(tt.args.name), target.WithDescription(tt.args.description), diff --git a/internal/target/tcp/repository_test.go b/internal/target/tcp/repository_test.go index 26e1dcf5ff2..aa21ce157f8 100644 --- a/internal/target/tcp/repository_test.go +++ b/internal/target/tcp/repository_test.go @@ -143,7 +143,6 @@ func TestRepository_LookupTarget(t *testing.T) { func TestRepository_ListRoles_Multiple_Scopes(t *testing.T) { t.Parallel() - ctx := context.Background() conn, _ := db.TestSetup(t, "postgres") wrapper := db.TestWrapper(t) testKms := kms.TestKms(t, conn, wrapper) @@ -152,8 +151,9 @@ func TestRepository_ListRoles_Multiple_Scopes(t *testing.T) { _, proj1 := iam.TestScopes(t, iamRepo) _, proj2 := iam.TestScopes(t, iamRepo) - db.TestDeleteWhere(t, conn, tcp.NewTestTarget(ctx, ""), "1=1") + db.TestDeleteWhere(t, conn, tcp.NewTestTarget(""), "1=1") + ctx := context.Background() const numPerScope = 10 var total int for i := 0; i < numPerScope; i++ { @@ -235,7 +235,7 @@ func TestRepository_DeleteTarget(t *testing.T) { name: "not-found", args: args{ target: func() target.Target { - id, err := db.NewPublicId(ctx, globals.TcpTargetPrefix) + id, err := db.NewPublicId(globals.TcpTargetPrefix) require.NoError(t, err) tar, _ := target.New(ctx, tcp.Subtype, proj.PublicId) tar.SetPublicId(ctx, id) diff --git a/internal/target/tcp/target.go b/internal/target/tcp/target.go index 014d2aada1e..55b095eee2a 100644 --- a/internal/target/tcp/target.go +++ b/internal/target/tcp/target.go @@ -46,11 +46,11 @@ var ( // NewTarget creates a new in memory tcp target. WithName, WithDescription and // WithDefaultPort options are supported -func (h targetHooks) NewTarget(ctx context.Context, projectId string, opt ...target.Option) (target.Target, error) { +func (h targetHooks) NewTarget(projectId string, opt ...target.Option) (target.Target, error) { const op = "tcp.NewTarget" opts := target.GetOpts(opt...) 
if projectId == "" { - return nil, errors.New(ctx, errors.InvalidParameter, op, "missing project id") + return nil, errors.NewDeprecated(errors.InvalidParameter, op, "missing project id") } t := &Target{ Target: &store.Target{ diff --git a/internal/target/tcp/target_test.go b/internal/target/tcp/target_test.go index ef0c04ab0e9..9858b38f37b 100644 --- a/internal/target/tcp/target_test.go +++ b/internal/target/tcp/target_test.go @@ -78,7 +78,7 @@ func TestTarget_Create(t *testing.T) { require.NoError(err) assert.Equal(tt.want, got) if tt.create { - id, err := db.NewPublicId(ctx, globals.TcpTargetPrefix) + id, err := db.NewPublicId(globals.TcpTargetPrefix) require.NoError(err) got.SetPublicId(ctx, id) err = db.New(conn).Create(ctx, got) @@ -119,7 +119,7 @@ func TestTarget_Delete(t *testing.T) { target: func() target.Target { tar, _ := target.New(ctx, tcp.Subtype, proj.PublicId) - id, err := db.NewPublicId(ctx, globals.TcpTargetPrefix) + id, err := db.NewPublicId(globals.TcpTargetPrefix) require.NoError(t, err) tar.SetPublicId(ctx, id) tar.SetName(tcp.TestTargetName(t, proj.PublicId)) @@ -132,7 +132,7 @@ func TestTarget_Delete(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert, require := assert.New(t), require.New(t) - deleteTarget := tcp.NewTestTarget(ctx, "") + deleteTarget := tcp.NewTestTarget("") deleteTarget.SetPublicId(ctx, tt.target.GetPublicId()) deletedRows, err := rw.Delete(context.Background(), deleteTarget) if tt.wantErr { @@ -145,7 +145,7 @@ func TestTarget_Delete(t *testing.T) { return } assert.Equal(tt.wantRowsDeleted, deletedRows) - foundTarget := tcp.NewTestTarget(ctx, "") + foundTarget := tcp.NewTestTarget("") foundTarget.SetPublicId(ctx, tt.target.GetPublicId()) err = rw.LookupById(context.Background(), foundTarget) require.Error(err) @@ -266,7 +266,7 @@ func TestTarget_Update(t *testing.T) { id := tcp.TestId(t) tar := tcp.TestTarget(ctx, t, conn, proj.PublicId, id, target.WithDescription(id)) - updateTarget := tcp.NewTestTarget(ctx, tt.args.ProjectId) + updateTarget := tcp.NewTestTarget(tt.args.ProjectId) updateTarget.SetPublicId(ctx, tar.GetPublicId()) updateTarget.SetName(tt.args.name) updateTarget.SetDescription(tt.args.description) @@ -284,7 +284,7 @@ func TestTarget_Update(t *testing.T) { require.NoError(err) assert.Equal(tt.wantRowsUpdate, updatedRows) assert.NotEqual(tar.GetUpdateTime(), updateTarget.GetUpdateTime()) - foundTarget := tcp.NewTestTarget(ctx, tt.args.ProjectId) + foundTarget := tcp.NewTestTarget(tt.args.ProjectId) foundTarget.SetPublicId(ctx, tar.GetPublicId()) err = rw.LookupByPublicId(ctx, foundTarget) require.NoError(err) diff --git a/internal/target/tcp/testing.go b/internal/target/tcp/testing.go index c65ac1f782c..2c18916ffdd 100644 --- a/internal/target/tcp/testing.go +++ b/internal/target/tcp/testing.go @@ -23,14 +23,14 @@ func TestTarget(ctx context.Context, t testing.TB, conn *db.DB, projectId, name rw := db.New(conn) tar, err := target.New(ctx, Subtype, projectId, opt...) 
require.NoError(err) - id, err := db.NewPublicId(ctx, TargetPrefix) + id, err := db.NewPublicId(TargetPrefix) require.NoError(err) tar.SetPublicId(ctx, id) - err = rw.Create(ctx, tar) + err = rw.Create(context.Background(), tar) require.NoError(err) if opts.WithAddress != "" { - address, err := target.NewAddress(ctx, tar.GetPublicId(), opts.WithAddress) + address, err := target.NewAddress(tar.GetPublicId(), opts.WithAddress) require.NoError(err) require.NotNil(address) err = rw.Create(context.Background(), address) @@ -39,11 +39,11 @@ func TestTarget(ctx context.Context, t testing.TB, conn *db.DB, projectId, name if len(opts.WithHostSources) > 0 { newHostSets := make([]any, 0, len(opts.WithHostSources)) for _, s := range opts.WithHostSources { - hostSet, err := target.NewTargetHostSet(ctx, tar.GetPublicId(), s) + hostSet, err := target.NewTargetHostSet(tar.GetPublicId(), s) require.NoError(err) newHostSets = append(newHostSets, hostSet) } - err := rw.CreateItems(ctx, newHostSets) + err := rw.CreateItems(context.Background(), newHostSets) require.NoError(err) } if len(opts.WithCredentialLibraries) > 0 { @@ -52,7 +52,7 @@ func TestTarget(ctx context.Context, t testing.TB, conn *db.DB, projectId, name cl.TargetId = tar.GetPublicId() newCredLibs = append(newCredLibs, cl) } - err := rw.CreateItems(ctx, newCredLibs) + err := rw.CreateItems(context.Background(), newCredLibs) require.NoError(err) } if len(opts.WithStaticCredentials) > 0 { @@ -61,7 +61,7 @@ func TestTarget(ctx context.Context, t testing.TB, conn *db.DB, projectId, name c.TargetId = tar.GetPublicId() newCreds = append(newCreds, c) } - err := rw.CreateItems(ctx, newCreds) + err := rw.CreateItems(context.Background(), newCreds) require.NoError(err) } return tar diff --git a/internal/tests/cli/boundary/_auth.bash b/internal/tests/cli/boundary/_auth.bash index d95d146b3bc..e23642def15 100644 --- a/internal/tests/cli/boundary/_auth.bash +++ b/internal/tests/cli/boundary/_auth.bash @@ -5,9 +5,3 @@ function login() { export BP="${DEFAULT_PASSWORD}" boundary authenticate password -auth-method-id $DEFAULT_AMPW -login-name $1 -password env://BP } - - -function login_ldap() { - export BP="${DEFAULT_PASSWORD}" - boundary authenticate ldap -auth-method-id $DEFAULT_AMPW -login-name $1 -password env://BP -} diff --git a/internal/tests/cli/boundary/_helpers.bash b/internal/tests/cli/boundary/_helpers.bash index 9598d6d43d5..aa1badb6850 100644 --- a/internal/tests/cli/boundary/_helpers.bash +++ b/internal/tests/cli/boundary/_helpers.bash @@ -15,8 +15,6 @@ export DEFAULT_HOST_CATALOG="${DEFAULT_HOST_CATALOG:-hcst_1234567890}" export DEFAULT_HOST="${DEFAULT_HOST:-hst_1234567890}" export DEFAULT_USER="${DEFAULT_USER:-u_1234567890}" export DEFAULT_UNPRIVILEGED_USER="${DEFAULT_UNPRIVILEGED_USER:-u_0987654321}" -export DEFAULT_AMLDAP="${DEFAULT_AMLDAP:-amldap_1234567890}" - function strip() { echo "$1" | tr -d '"' diff --git a/internal/tests/cli/boundary/authenticate.bats b/internal/tests/cli/boundary/authenticate.bats deleted file mode 100644 index c9ec53d5493..00000000000 --- a/internal/tests/cli/boundary/authenticate.bats +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bats - -load _auth -load _helpers -load _auth_tokens - -export NEW_USER='test' - -@test "boundary/authenticate password: can login as unpriv user" { - run login $DEFAULT_UNPRIVILEGED_LOGIN - [ "$status" -eq 0 ] - run logout_cmd - [ "$status" -eq 0 ] -} - -@test "boundary/authenticate ldap: can login as unpriv user" { - run login_ldap $DEFAULT_UNPRIVILEGED_LOGIN - [ "$status" -eq 0 ] - run 
logout_cmd - [ "$status" -eq 0 ] -} - - diff --git a/internal/tests/cluster/recursive_anon_listing_test.go b/internal/tests/cluster/recursive_anon_listing_test.go index a6830fa48cc..04378f3fd78 100644 --- a/internal/tests/cluster/recursive_anon_listing_test.go +++ b/internal/tests/cluster/recursive_anon_listing_test.go @@ -32,18 +32,18 @@ func TestListAnonymousRecursing(t *testing.T) { require.NoError(err) require.NotNil(am) - // We expect to see all four with the normal token + // We expect to see all three with the normal token l, err := amClient.List(tc.Context(), scope.Global.String(), amapi.WithRecursive(true)) require.NoError(err) require.NotNil(l) require.Len(l.GetItems(), 4) - // Originally we also expect to see all four as anon user + // Originally we also expect to see all three as anon user amClient.ApiClient().SetToken("") l, err = amClient.List(tc.Context(), scope.Global.String(), amapi.WithRecursive(true)) require.NoError(err) require.NotNil(l) - require.Len(l.GetItems(), 4) + require.Len(l.GetItems(), 3) // Find the global roles and delete them rl, err := rolesClient.List(tc.Context(), scope.Global.String()) diff --git a/internal/types/subtypes/registry.go b/internal/types/subtypes/registry.go index 20ccc1dc262..8455f6b084e 100644 --- a/internal/types/subtypes/registry.go +++ b/internal/types/subtypes/registry.go @@ -5,7 +5,6 @@ package subtypes import ( - "context" "fmt" "strings" "sync" @@ -89,20 +88,20 @@ func (r *Registry) Prefixes() []string { // Register registers all the prefixes for a provided Subtype. Register returns // an error if the subtype has already been registered or if any of the // prefixes are associated with another subtype. -func (r *Registry) Register(ctx context.Context, subtype Subtype, prefixes ...string) error { +func (r *Registry) Register(subtype Subtype, prefixes ...string) error { r.Lock() defer r.Unlock() const op = "subtypes.(Registry).Register" if _, present := r.knownSubtypes[subtype]; present { - return errors.New(ctx, errors.SubtypeAlreadyRegistered, op, fmt.Sprintf("subtype %q already registered", subtype)) + return errors.NewDeprecated(errors.SubtypeAlreadyRegistered, op, fmt.Sprintf("subtype %q already registered", subtype)) } r.knownSubtypes[subtype] = nil for _, prefix := range prefixes { prefix = strings.TrimSpace(prefix) if st, ok := r.subtypesPrefixes[prefix]; ok { - return errors.New(ctx, errors.SubtypeAlreadyRegistered, op, fmt.Sprintf("prefix %q is already registered to subtype %q", prefix, st)) + return errors.NewDeprecated(errors.SubtypeAlreadyRegistered, op, fmt.Sprintf("prefix %q is already registered to subtype %q", prefix, st)) } r.subtypesPrefixes[prefix] = subtype } diff --git a/internal/types/subtypes/registry_test.go b/internal/types/subtypes/registry_test.go index 7c203ae570c..19589e87ef2 100644 --- a/internal/types/subtypes/registry_test.go +++ b/internal/types/subtypes/registry_test.go @@ -4,7 +4,6 @@ package subtypes_test import ( - "context" "testing" "github.com/hashicorp/boundary/internal/types/subtypes" @@ -14,7 +13,7 @@ import ( func TestSubtypeFromId(t *testing.T) { testSubtype := subtypes.Subtype("test") r := subtypes.NewRegistry() - r.Register(context.Background(), testSubtype, "tttst") + r.Register(testSubtype, "tttst") tests := []struct { name string given string @@ -43,7 +42,7 @@ func TestSubtypeFromId(t *testing.T) { func TestSubtypeFromType(t *testing.T) { testSubtype := subtypes.Subtype("test") r := subtypes.NewRegistry() - r.Register(context.Background(), testSubtype, "tttst") + 
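// The GetId / GetIds getters in the surrounding role.pb.go hunk follow protoc-gen-go's
// nil-safe convention: a getter called on a nil message returns the zero value instead of
// panicking. A minimal sketch with a hand-written struct; grantJSON is a hypothetical
// stand-in for the generated GrantJson message.
package main

import "fmt"

type grantJSON struct {
	Id      string
	Ids     []string
	Actions []string
}

func (x *grantJSON) GetId() string {
	if x != nil {
		return x.Id
	}
	return ""
}

func (x *grantJSON) GetIds() []string {
	if x != nil {
		return x.Ids
	}
	return nil
}

func main() {
	var g *grantJSON // nil message pointer
	fmt.Printf("%q %v\n", g.GetId(), g.GetIds()) // "" [] -- no panic on the nil receiver
}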
r.Register(testSubtype, "tttst") tests := []struct { name string given string @@ -68,12 +67,11 @@ func TestSubtypeFromType(t *testing.T) { func TestRegister(t *testing.T) { r := subtypes.NewRegistry() - ctx := context.Background() - assert.NoError(t, r.Register(ctx, "test", "testprefix")) + assert.NoError(t, r.Register("test", "testprefix")) // registering multiple subtypes should be fine. - assert.NoError(t, r.Register(ctx, "second", "secondprefix")) + assert.NoError(t, r.Register("second", "secondprefix")) // registering another prefix with a different subtype errors. - assert.Error(t, r.Register(ctx, "third", "testprefix")) + assert.Error(t, r.Register("third", "testprefix")) // Registering the same subtype twice errors. - assert.Error(t, r.Register(ctx, "test", "repeatedprefix")) + assert.Error(t, r.Register("test", "repeatedprefix")) } diff --git a/sdk/pbs/controller/api/resources/roles/role.pb.go b/sdk/pbs/controller/api/resources/roles/role.pb.go index 1a5ba9c6f04..ad3ebac532e 100644 --- a/sdk/pbs/controller/api/resources/roles/role.pb.go +++ b/sdk/pbs/controller/api/resources/roles/role.pb.go @@ -99,12 +99,7 @@ type GrantJson struct { unknownFields protoimpl.UnknownFields // Output only. The ID, if set. - // Deprecated: use "ids" instead. - // - // Deprecated: Marked as deprecated in controller/api/resources/roles/v1/role.proto. Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty" class:"public"` // @gotags: `class:"public"` - // Output only. The IDs, if set. - Ids []string `protobuf:"bytes,4,rep,name=ids,proto3" json:"ids,omitempty" class:"public"` // @gotags: `class:"public"` // Output only. The type, if set. Type string `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty" class:"public"` // @gotags: `class:"public"` // Output only. The actions. @@ -143,7 +138,6 @@ func (*GrantJson) Descriptor() ([]byte, []int) { return file_controller_api_resources_roles_v1_role_proto_rawDescGZIP(), []int{1} } -// Deprecated: Marked as deprecated in controller/api/resources/roles/v1/role.proto. 
func (x *GrantJson) GetId() string { if x != nil { return x.Id @@ -151,13 +145,6 @@ func (x *GrantJson) GetId() string { return "" } -func (x *GrantJson) GetIds() []string { - if x != nil { - return x.Ids - } - return nil -} - func (x *GrantJson) GetType() string { if x != nil { return x.Type @@ -427,78 +414,77 @@ var file_controller_api_resources_roles_v1_role_proto_rawDesc = []byte{ 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x08, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x22, 0x5f, 0x0a, 0x09, 0x47, - 0x72, 0x61, 0x6e, 0x74, 0x4a, 0x73, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x02, 0x69, 0x64, 0x12, 0x10, 0x0a, 0x03, - 0x69, 0x64, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x03, 0x69, 0x64, 0x73, 0x12, 0x12, - 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x07, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x79, 0x0a, 0x05, - 0x47, 0x72, 0x61, 0x6e, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x61, 0x77, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x72, 0x61, 0x77, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x61, 0x6e, 0x6f, 0x6e, - 0x69, 0x63, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x61, 0x6e, 0x6f, - 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x12, 0x40, 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x72, - 0x6f, 0x6c, 0x65, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x72, 0x61, 0x6e, 0x74, 0x4a, 0x73, 0x6f, - 0x6e, 0x52, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0xb9, 0x06, 0x0a, 0x04, 0x52, 0x6f, 0x6c, 0x65, - 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, - 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x14, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x12, 0x43, 0x0a, 0x05, - 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x2e, 0x76, 0x31, - 0x2e, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, - 0x65, 0x12, 0x46, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x28, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x14, 0xa0, - 0xda, 0x29, 0x01, 0xc2, 0xdd, 0x29, 0x0c, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x62, 0x0a, 0x0b, 0x64, 0x65, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x32, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x53, 0x74, 0x72, 0x69, 
0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x22, 0xa0, 0xda, - 0x29, 0x01, 0xc2, 0xdd, 0x29, 0x1a, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, - 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3e, 0x0a, - 0x0c, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x3c, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x0c, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x3e, 0x0a, - 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x46, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x18, 0x0a, - 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x50, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x07, - 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x6c, 0x0a, 0x0e, 0x67, 0x72, 0x61, 0x6e, 0x74, - 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x5a, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x26, 0xa0, - 0xda, 0x29, 0x01, 0xc2, 0xdd, 0x29, 0x1e, 0x0a, 0x0e, 0x67, 0x72, 0x61, 0x6e, 0x74, 0x5f, 0x73, - 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x12, 0x0c, 0x47, 0x72, 0x61, 0x6e, 0x74, 0x53, 0x63, - 0x6f, 0x70, 0x65, 0x49, 0x64, 0x52, 0x0e, 0x67, 0x72, 0x61, 0x6e, 0x74, 0x5f, 0x73, 0x63, 0x6f, - 0x70, 0x65, 0x5f, 0x69, 0x64, 0x12, 0x24, 0x0a, 0x0d, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, - 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x64, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x70, 0x72, - 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x73, 0x12, 0x4c, 0x0a, 0x0a, 0x70, - 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x73, 0x18, 0x6e, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x2c, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x72, 0x6f, 0x6c, 0x65, 0x73, - 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x52, 0x0a, 0x70, - 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x73, 0x12, 0x24, 0x0a, 0x0d, 0x67, 0x72, 0x61, - 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x78, 0x20, 0x03, 0x28, 0x09, - 0x52, 0x0d, 0x67, 0x72, 0x61, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x73, 0x12, - 0x41, 0x0a, 0x06, 0x67, 0x72, 0x61, 0x6e, 0x74, 0x73, 0x18, 0x82, 0x01, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x28, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x72, 0x6f, 0x6c, 0x65, - 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x72, 0x61, 0x6e, 0x74, 0x52, 0x06, 0x67, 0x72, 0x61, 0x6e, - 0x74, 0x73, 0x12, 0x2f, 0x0a, 0x12, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, - 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0xac, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x12, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 
0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x42, 0x4c, 0x5a, 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x62, 0x6f, 0x75, 0x6e, - 0x64, 0x61, 0x72, 0x79, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x62, 0x73, 0x2f, 0x63, 0x6f, 0x6e, - 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x72, 0x6f, 0x6c, 0x65, 0x73, 0x3b, 0x72, 0x6f, 0x6c, 0x65, - 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x09, 0x52, 0x08, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x22, 0x49, 0x0a, 0x09, 0x47, + 0x72, 0x61, 0x6e, 0x74, 0x4a, 0x73, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x07, + 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x79, 0x0a, 0x05, 0x47, 0x72, 0x61, 0x6e, 0x74, 0x12, + 0x10, 0x0a, 0x03, 0x72, 0x61, 0x77, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x72, 0x61, + 0x77, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x12, + 0x40, 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x72, 0x6f, 0x6c, 0x65, 0x73, 0x2e, 0x76, + 0x31, 0x2e, 0x47, 0x72, 0x61, 0x6e, 0x74, 0x4a, 0x73, 0x6f, 0x6e, 0x52, 0x04, 0x6a, 0x73, 0x6f, + 0x6e, 0x22, 0xb9, 0x06, 0x0a, 0x04, 0x52, 0x6f, 0x6c, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x63, + 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x14, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x63, + 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x12, 0x43, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, + 0x1e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, + 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, + 0x2e, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x6f, 0x70, 0x65, + 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x12, 0x46, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x28, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x69, + 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x14, 0xa0, 0xda, 0x29, 0x01, 0xc2, 0xdd, 0x29, + 0x0c, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x62, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x18, 0x32, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x22, 0xa0, 0xda, 0x29, 0x01, 0xc2, 0xdd, 0x29, 0x1a, + 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0b, 0x64, + 0x65, 0x73, 0x63, 0x72, 
0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3e, 0x0a, 0x0c, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x3c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0c, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x3e, 0x0a, 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x46, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x18, 0x50, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x12, 0x6c, 0x0a, 0x0e, 0x67, 0x72, 0x61, 0x6e, 0x74, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, + 0x5f, 0x69, 0x64, 0x18, 0x5a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x69, + 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x26, 0xa0, 0xda, 0x29, 0x01, 0xc2, 0xdd, 0x29, + 0x1e, 0x0a, 0x0e, 0x67, 0x72, 0x61, 0x6e, 0x74, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, + 0x64, 0x12, 0x0c, 0x47, 0x72, 0x61, 0x6e, 0x74, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x49, 0x64, 0x52, + 0x0e, 0x67, 0x72, 0x61, 0x6e, 0x74, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, 0x12, + 0x24, 0x0a, 0x0d, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x73, + 0x18, 0x64, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, + 0x6c, 0x5f, 0x69, 0x64, 0x73, 0x12, 0x4c, 0x0a, 0x0a, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, + 0x61, 0x6c, 0x73, 0x18, 0x6e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x73, 0x2e, 0x72, 0x6f, 0x6c, 0x65, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, + 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, + 0x61, 0x6c, 0x73, 0x12, 0x24, 0x0a, 0x0d, 0x67, 0x72, 0x61, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x73, 0x18, 0x78, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x67, 0x72, 0x61, 0x6e, + 0x74, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x41, 0x0a, 0x06, 0x67, 0x72, 0x61, + 0x6e, 0x74, 0x73, 0x18, 0x82, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x72, 0x6f, 0x6c, 0x65, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x47, + 0x72, 0x61, 0x6e, 0x74, 0x52, 0x06, 0x67, 0x72, 0x61, 0x6e, 0x74, 0x73, 0x12, 0x2f, 0x0a, 0x12, + 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x18, 0xac, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x61, 0x75, 0x74, 0x68, 0x6f, + 0x72, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x4c, 0x5a, + 0x4a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x73, 0x68, + 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x62, 0x6f, 
0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x2f, 0x73, + 0x64, 0x6b, 0x2f, 0x70, 0x62, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, + 0x72, 0x6f, 0x6c, 0x65, 0x73, 0x3b, 0x72, 0x6f, 0x6c, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, } var ( diff --git a/sdk/pbs/controller/api/resources/session_recordings/session_recording.pb.go b/sdk/pbs/controller/api/resources/session_recordings/session_recording.pb.go index aa2268d5fdf..df1018d61b7 100644 --- a/sdk/pbs/controller/api/resources/session_recordings/session_recording.pb.go +++ b/sdk/pbs/controller/api/resources/session_recordings/session_recording.pb.go @@ -883,795 +883,6 @@ func (x *SshTargetAttributes) GetDefaultClientPort() uint32 { return 0 } -// CredentialStore contains all fields related to a Credential Store resource -type CredentialStore struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The ID of the Credential Store. - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty" class:"public"` // @gotags: class:"public" - // The ID of the Scope of which this Credential Store is a part. - ScopeId string `protobuf:"bytes,2,opt,name=scope_id,proto3" json:"scope_id,omitempty" class:"public"` // @gotags: class:"public" - // The name for identification purposes if set. - Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty" class:"public"` // @gotags: class:"public" - // The description for identification purposes if set. - Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty" class:"public"` // @gotags: class:"public" - // The Credential Store type. - Type string `protobuf:"bytes,5,opt,name=type,proto3" json:"type,omitempty" class:"public"` // @gotags: class:"public" - // Types that are assignable to Attrs: - // - // *CredentialStore_Attributes - Attrs isCredentialStore_Attrs `protobuf_oneof:"attrs"` -} - -func (x *CredentialStore) Reset() { - *x = CredentialStore{} - if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CredentialStore) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CredentialStore) ProtoMessage() {} - -func (x *CredentialStore) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CredentialStore.ProtoReflect.Descriptor instead. 
-func (*CredentialStore) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{8} -} - -func (x *CredentialStore) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *CredentialStore) GetScopeId() string { - if x != nil { - return x.ScopeId - } - return "" -} - -func (x *CredentialStore) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *CredentialStore) GetDescription() string { - if x != nil { - return x.Description - } - return "" -} - -func (x *CredentialStore) GetType() string { - if x != nil { - return x.Type - } - return "" -} - -func (m *CredentialStore) GetAttrs() isCredentialStore_Attrs { - if m != nil { - return m.Attrs - } - return nil -} - -func (x *CredentialStore) GetAttributes() *structpb.Struct { - if x, ok := x.GetAttrs().(*CredentialStore_Attributes); ok { - return x.Attributes - } - return nil -} - -type isCredentialStore_Attrs interface { - isCredentialStore_Attrs() -} - -type CredentialStore_Attributes struct { - // The attributes that are applicable for the specific Credential Store type. - Attributes *structpb.Struct `protobuf:"bytes,6,opt,name=attributes,proto3,oneof"` -} - -func (*CredentialStore_Attributes) isCredentialStore_Attrs() {} - -// The attributes of a vault typed Credential Store. -type VaultCredentialStoreAttributes struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The complete url address of vault. - Address string `protobuf:"bytes,1,opt,name=address,proto3" json:"address,omitempty" class:"public"` // @gotags: class:"public" - // The namespace of vault used by this store - Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty" class:"public"` // @gotags: class:"public" - // The value to use as the SNI host when connecting to vault via TLS. - TlsServerName string `protobuf:"bytes,3,opt,name=tls_server_name,proto3" json:"tls_server_name,omitempty" class:"public"` // @gotags: class:"public" - // Indicates if verification of the TLS certificate is disabled. - TlsSkipVerify bool `protobuf:"varint,4,opt,name=tls_skip_verify,proto3" json:"tls_skip_verify,omitempty" class:"public"` // @gotags: class:"public" - // Filters to the worker(s) who can handle Vault requests for this cred store if set. - WorkerFilter string `protobuf:"bytes,5,opt,name=worker_filter,proto3" json:"worker_filter,omitempty" class:"public"` // @gotags: class:"public" -} - -func (x *VaultCredentialStoreAttributes) Reset() { - *x = VaultCredentialStoreAttributes{} - if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *VaultCredentialStoreAttributes) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*VaultCredentialStoreAttributes) ProtoMessage() {} - -func (x *VaultCredentialStoreAttributes) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use VaultCredentialStoreAttributes.ProtoReflect.Descriptor instead. 
-func (*VaultCredentialStoreAttributes) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{9} -} - -func (x *VaultCredentialStoreAttributes) GetAddress() string { - if x != nil { - return x.Address - } - return "" -} - -func (x *VaultCredentialStoreAttributes) GetNamespace() string { - if x != nil { - return x.Namespace - } - return "" -} - -func (x *VaultCredentialStoreAttributes) GetTlsServerName() string { - if x != nil { - return x.TlsServerName - } - return "" -} - -func (x *VaultCredentialStoreAttributes) GetTlsSkipVerify() bool { - if x != nil { - return x.TlsSkipVerify - } - return false -} - -func (x *VaultCredentialStoreAttributes) GetWorkerFilter() string { - if x != nil { - return x.WorkerFilter - } - return "" -} - -// Credential contains fields related to an Credential resource -type Credential struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The ID of the Credential. - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty" class:"public"` // @gotags: class:"public" - // The Credential Store of which this Credential is a part. - CredentialStore *CredentialStore `protobuf:"bytes,2,opt,name=credential_store,proto3" json:"credential_store,omitempty" class:"public"` // @gotags: class:"public" - // The name of the credential. - Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty" class:"public"` // @gotags: class:"public" - // Optional user-set description. - Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty" class:"public"` // @gotags: class:"public" - // The purposes for which this Credential was attached to the sesssion. - Purposes []string `protobuf:"bytes,5,rep,name=purposes,proto3" json:"purposes,omitempty"` - // The Credential type. - Type string `protobuf:"bytes,6,opt,name=type,proto3" json:"type,omitempty" class:"public"` // @gotags: class:"public" - // Types that are assignable to Attrs: - // - // *Credential_Attributes - Attrs isCredential_Attrs `protobuf_oneof:"attrs"` -} - -func (x *Credential) Reset() { - *x = Credential{} - if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Credential) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Credential) ProtoMessage() {} - -func (x *Credential) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Credential.ProtoReflect.Descriptor instead. 
-func (*Credential) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{10} -} - -func (x *Credential) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *Credential) GetCredentialStore() *CredentialStore { - if x != nil { - return x.CredentialStore - } - return nil -} - -func (x *Credential) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *Credential) GetDescription() string { - if x != nil { - return x.Description - } - return "" -} - -func (x *Credential) GetPurposes() []string { - if x != nil { - return x.Purposes - } - return nil -} - -func (x *Credential) GetType() string { - if x != nil { - return x.Type - } - return "" -} - -func (m *Credential) GetAttrs() isCredential_Attrs { - if m != nil { - return m.Attrs - } - return nil -} - -func (x *Credential) GetAttributes() *structpb.Struct { - if x, ok := x.GetAttrs().(*Credential_Attributes); ok { - return x.Attributes - } - return nil -} - -type isCredential_Attrs interface { - isCredential_Attrs() -} - -type Credential_Attributes struct { - // The attributes that are applicable for the specific Credential type. - Attributes *structpb.Struct `protobuf:"bytes,7,opt,name=attributes,proto3,oneof"` -} - -func (*Credential_Attributes) isCredential_Attrs() {} - -// The attributes of a UsernamePassword Credential. -type UsernamePasswordCredentialAttributes struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The username associated with the credential. - Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty" class:"public"` // @gotags: class:"public" - // The hmac value of the password. - PasswordHmac string `protobuf:"bytes,2,opt,name=password_hmac,json=passwordHmac,proto3" json:"password_hmac,omitempty" class:"public"` // @gotags: class:"public" -} - -func (x *UsernamePasswordCredentialAttributes) Reset() { - *x = UsernamePasswordCredentialAttributes{} - if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *UsernamePasswordCredentialAttributes) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*UsernamePasswordCredentialAttributes) ProtoMessage() {} - -func (x *UsernamePasswordCredentialAttributes) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use UsernamePasswordCredentialAttributes.ProtoReflect.Descriptor instead. -func (*UsernamePasswordCredentialAttributes) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{11} -} - -func (x *UsernamePasswordCredentialAttributes) GetUsername() string { - if x != nil { - return x.Username - } - return "" -} - -func (x *UsernamePasswordCredentialAttributes) GetPasswordHmac() string { - if x != nil { - return x.PasswordHmac - } - return "" -} - -// The attributes of a SshPrivateKey Credential. 
-type SshPrivateKeyCredentialAttributes struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The username associated with the credential. - Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty" class:"public"` // @gotags: class:"public" - // The hmac value of the SSH private key. - PrivateKeyHmac string `protobuf:"bytes,2,opt,name=private_key_hmac,json=privateKeyHmac,proto3" json:"private_key_hmac,omitempty" class:"public"` // @gotags: class:"public" - // The hmac value of the SSH private key passphrase. - PrivateKeyPassphraseHmac string `protobuf:"bytes,3,opt,name=private_key_passphrase_hmac,json=privateKeyPassphraseHmac,proto3" json:"private_key_passphrase_hmac,omitempty" class:"public"` // @gotags: class:"public" -} - -func (x *SshPrivateKeyCredentialAttributes) Reset() { - *x = SshPrivateKeyCredentialAttributes{} - if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *SshPrivateKeyCredentialAttributes) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*SshPrivateKeyCredentialAttributes) ProtoMessage() {} - -func (x *SshPrivateKeyCredentialAttributes) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use SshPrivateKeyCredentialAttributes.ProtoReflect.Descriptor instead. -func (*SshPrivateKeyCredentialAttributes) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{12} -} - -func (x *SshPrivateKeyCredentialAttributes) GetUsername() string { - if x != nil { - return x.Username - } - return "" -} - -func (x *SshPrivateKeyCredentialAttributes) GetPrivateKeyHmac() string { - if x != nil { - return x.PrivateKeyHmac - } - return "" -} - -func (x *SshPrivateKeyCredentialAttributes) GetPrivateKeyPassphraseHmac() string { - if x != nil { - return x.PrivateKeyPassphraseHmac - } - return "" -} - -// The attributes of a JSON Credential. -type JsonCredentialAttributes struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The hmac value of the object. 
- ObjectHmac string `protobuf:"bytes,1,opt,name=object_hmac,json=objectHmac,proto3" json:"object_hmac,omitempty" class:"public"` // @gotags: class:"public" -} - -func (x *JsonCredentialAttributes) Reset() { - *x = JsonCredentialAttributes{} - if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *JsonCredentialAttributes) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*JsonCredentialAttributes) ProtoMessage() {} - -func (x *JsonCredentialAttributes) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use JsonCredentialAttributes.ProtoReflect.Descriptor instead. -func (*JsonCredentialAttributes) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{13} -} - -func (x *JsonCredentialAttributes) GetObjectHmac() string { - if x != nil { - return x.ObjectHmac - } - return "" -} - -// CredentialLibrary contains all fields related to an Credential Library resource -type CredentialLibrary struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The ID of the Credential Library. - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty" class:"public"` // @gotags: class:"public" - // The credential store of which this library is a part. - CredentialStore *CredentialStore `protobuf:"bytes,2,opt,name=credential_store,proto3" json:"credential_store,omitempty" class:"public"` // @gotags: class:"public" - // Optional name of this Credential Library. - Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty" class:"public"` // @gotags: class:"public" - // Optional user-set description of this Credential Library. - Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty" class:"public"` // @gotags: class:"public" - // The purposes for which this CredentialLibrary was attached to the sesssion. - Purposes []string `protobuf:"bytes,5,rep,name=purposes,proto3" json:"purposes,omitempty"` - // The Credential Library type. 
- Type string `protobuf:"bytes,6,opt,name=type,proto3" json:"type,omitempty" class:"public"` // @gotags: class:"public" - // Types that are assignable to Attrs: - // - // *CredentialLibrary_Attributes - Attrs isCredentialLibrary_Attrs `protobuf_oneof:"attrs"` -} - -func (x *CredentialLibrary) Reset() { - *x = CredentialLibrary{} - if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *CredentialLibrary) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*CredentialLibrary) ProtoMessage() {} - -func (x *CredentialLibrary) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use CredentialLibrary.ProtoReflect.Descriptor instead. -func (*CredentialLibrary) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{14} -} - -func (x *CredentialLibrary) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *CredentialLibrary) GetCredentialStore() *CredentialStore { - if x != nil { - return x.CredentialStore - } - return nil -} - -func (x *CredentialLibrary) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *CredentialLibrary) GetDescription() string { - if x != nil { - return x.Description - } - return "" -} - -func (x *CredentialLibrary) GetPurposes() []string { - if x != nil { - return x.Purposes - } - return nil -} - -func (x *CredentialLibrary) GetType() string { - if x != nil { - return x.Type - } - return "" -} - -func (m *CredentialLibrary) GetAttrs() isCredentialLibrary_Attrs { - if m != nil { - return m.Attrs - } - return nil -} - -func (x *CredentialLibrary) GetAttributes() *structpb.Struct { - if x, ok := x.GetAttrs().(*CredentialLibrary_Attributes); ok { - return x.Attributes - } - return nil -} - -type isCredentialLibrary_Attrs interface { - isCredentialLibrary_Attrs() -} - -type CredentialLibrary_Attributes struct { - // The attributes that are applicable for the specific Credential Library type. - Attributes *structpb.Struct `protobuf:"bytes,7,opt,name=attributes,proto3,oneof"` -} - -func (*CredentialLibrary_Attributes) isCredentialLibrary_Attrs() {} - -// The attributes of a vault typed Credential Library. -type VaultCredentialLibraryAttributes struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The path in Vault to request credentials from. - Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty" class:"public"` // @gotags: class:"public" - // The HTTP method the library uses to communicate with Vault. - HttpMethod string `protobuf:"bytes,2,opt,name=http_method,json=httpMethod,proto3" json:"http_method,omitempty" class:"public"` // @gotags: class:"public" - // The body of the HTTP request the library sends to vault. 
- HttpRequestBody string `protobuf:"bytes,3,opt,name=http_request_body,json=httpRequestBody,proto3" json:"http_request_body,omitempty" class:"secret"` // @gotags: `class:"secret"` -} - -func (x *VaultCredentialLibraryAttributes) Reset() { - *x = VaultCredentialLibraryAttributes{} - if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[15] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *VaultCredentialLibraryAttributes) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*VaultCredentialLibraryAttributes) ProtoMessage() {} - -func (x *VaultCredentialLibraryAttributes) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[15] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use VaultCredentialLibraryAttributes.ProtoReflect.Descriptor instead. -func (*VaultCredentialLibraryAttributes) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{15} -} - -func (x *VaultCredentialLibraryAttributes) GetPath() string { - if x != nil { - return x.Path - } - return "" -} - -func (x *VaultCredentialLibraryAttributes) GetHttpMethod() string { - if x != nil { - return x.HttpMethod - } - return "" -} - -func (x *VaultCredentialLibraryAttributes) GetHttpRequestBody() string { - if x != nil { - return x.HttpRequestBody - } - return "" -} - -// The attributes of a vault SSH Certificate Credential Library. -type VaultSSHCertificateCredentialLibraryAttributes struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // The path in Vault to request credentials from. - Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty" class:"public"` // @gotags: class:"public" - // The username used when making an SSH connection. - Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username,omitempty" class:"sensitive"` // @gotags: `class:"sensitive"` - // The key type to use when generating an SSH private key. - KeyType string `protobuf:"bytes,3,opt,name=key_type,json=keyType,proto3" json:"key_type,omitempty" class:"public"` // @gotags: class:"public" - // The number of bits to use to generate an SSH private key. - KeyBits uint32 `protobuf:"varint,4,opt,name=key_bits,json=keyBits,proto3" json:"key_bits,omitempty" class:"public"` // @gotags: class:"public" - // The requested time to live for the certificate. - Ttl string `protobuf:"bytes,5,opt,name=ttl,proto3" json:"ttl,omitempty" class:"public"` // @gotags: class:"public" - // The critical options that the certificate should be signed for. - CriticalOptions map[string]string `protobuf:"bytes,6,rep,name=critical_options,json=criticalOptions,proto3" json:"critical_options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3" class:"public"` // @gotags: class:"public" - // The extensions that the certificate should be signed for. 
- Extensions map[string]string `protobuf:"bytes,7,rep,name=extensions,proto3" json:"extensions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3" class:"public"` // @gotags: class:"public" -} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) Reset() { - *x = VaultSSHCertificateCredentialLibraryAttributes{} - if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[16] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*VaultSSHCertificateCredentialLibraryAttributes) ProtoMessage() {} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[16] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use VaultSSHCertificateCredentialLibraryAttributes.ProtoReflect.Descriptor instead. -func (*VaultSSHCertificateCredentialLibraryAttributes) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{16} -} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) GetPath() string { - if x != nil { - return x.Path - } - return "" -} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) GetUsername() string { - if x != nil { - return x.Username - } - return "" -} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) GetKeyType() string { - if x != nil { - return x.KeyType - } - return "" -} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) GetKeyBits() uint32 { - if x != nil { - return x.KeyBits - } - return 0 -} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) GetTtl() string { - if x != nil { - return x.Ttl - } - return "" -} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) GetCriticalOptions() map[string]string { - if x != nil { - return x.CriticalOptions - } - return nil -} - -func (x *VaultSSHCertificateCredentialLibraryAttributes) GetExtensions() map[string]string { - if x != nil { - return x.Extensions - } - return nil -} - // ValuesAtTime contain information about other Boundary resources as they // were at a certain time through the lifetime of the Session Recording. type ValuesAtTime struct { @@ -1685,16 +896,12 @@ type ValuesAtTime struct { Target *Target `protobuf:"bytes,2,opt,name=target,proto3" json:"target,omitempty" class:"public"` // @gotags: class:"public" // Information about the Host chosen for the session. Host *Host `protobuf:"bytes,3,opt,name=host,proto3" json:"host,omitempty" class:"public"` // @gotags: class:"public" - // Information about the Credentials used for this session. - Credentials []*Credential `protobuf:"bytes,4,rep,name=credentials,proto3" json:"credentials,omitempty"` - // Information about the Credential Libraries used for this session. 
- CredentialLibraries []*CredentialLibrary `protobuf:"bytes,5,rep,name=credential_libraries,proto3" json:"credential_libraries,omitempty"` } func (x *ValuesAtTime) Reset() { *x = ValuesAtTime{} if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[17] + mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1707,7 +914,7 @@ func (x *ValuesAtTime) String() string { func (*ValuesAtTime) ProtoMessage() {} func (x *ValuesAtTime) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[17] + mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1720,7 +927,7 @@ func (x *ValuesAtTime) ProtoReflect() protoreflect.Message { // Deprecated: Use ValuesAtTime.ProtoReflect.Descriptor instead. func (*ValuesAtTime) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{17} + return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{8} } func (x *ValuesAtTime) GetUser() *User { @@ -1744,20 +951,6 @@ func (x *ValuesAtTime) GetHost() *Host { return nil } -func (x *ValuesAtTime) GetCredentials() []*Credential { - if x != nil { - return x.Credentials - } - return nil -} - -func (x *ValuesAtTime) GetCredentialLibraries() []*CredentialLibrary { - if x != nil { - return x.CredentialLibraries - } - return nil -} - // SessionRecording contains information about the recording of a Session. type SessionRecording struct { state protoimpl.MessageState @@ -1819,7 +1012,7 @@ type SessionRecording struct { func (x *SessionRecording) Reset() { *x = SessionRecording{} if protoimpl.UnsafeEnabled { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[18] + mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1832,7 +1025,7 @@ func (x *SessionRecording) String() string { func (*SessionRecording) ProtoMessage() {} func (x *SessionRecording) ProtoReflect() protoreflect.Message { - mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[18] + mi := &file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1845,7 +1038,7 @@ func (x *SessionRecording) ProtoReflect() protoreflect.Message { // Deprecated: Use SessionRecording.ProtoReflect.Descriptor instead. 
func (*SessionRecording) Descriptor() ([]byte, []int) { - return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{18} + return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescGZIP(), []int{9} } func (x *SessionRecording) GetId() string { @@ -2149,234 +1342,89 @@ var file_controller_api_resources_sessionrecordings_v1_session_recording_proto_r 0x0a, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x70, 0x6f, 0x72, 0x74, - 0x22, 0xcb, 0x01, 0x0a, 0x0f, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x53, - 0x74, 0x6f, 0x72, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x02, 0x69, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x5f, 0x69, 0x64, - 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x61, 0x74, - 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, - 0x62, 0x75, 0x74, 0x65, 0x73, 0x42, 0x07, 0x0a, 0x05, 0x61, 0x74, 0x74, 0x72, 0x73, 0x22, 0xd2, - 0x01, 0x0a, 0x1e, 0x56, 0x61, 0x75, 0x6c, 0x74, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, - 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, - 0x73, 0x12, 0x18, 0x0a, 0x07, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x07, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x6e, - 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x28, 0x0a, 0x0f, 0x74, 0x6c, 0x73, - 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0f, 0x74, 0x6c, 0x73, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6e, - 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x0f, 0x74, 0x6c, 0x73, 0x5f, 0x73, 0x6b, 0x69, 0x70, 0x5f, - 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x74, 0x6c, - 0x73, 0x5f, 0x73, 0x6b, 0x69, 0x70, 0x5f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x12, 0x24, 0x0a, - 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x5f, 0x66, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x22, 0xb2, 0x02, 0x0a, 0x0a, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, - 0x61, 0x6c, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, - 0x69, 0x64, 0x12, 0x6a, 0x0a, 0x10, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 
- 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, - 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, - 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x10, 0x63, 0x72, - 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x12, - 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x75, 0x72, 0x70, 0x6f, 0x73, 0x65, 0x73, - 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x70, 0x75, 0x72, 0x70, 0x6f, 0x73, 0x65, 0x73, - 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, - 0x65, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x42, - 0x07, 0x0a, 0x05, 0x61, 0x74, 0x74, 0x72, 0x73, 0x22, 0x67, 0x0a, 0x24, 0x55, 0x73, 0x65, 0x72, - 0x6e, 0x61, 0x6d, 0x65, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x43, 0x72, 0x65, 0x64, - 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, - 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x23, 0x0a, 0x0d, - 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x5f, 0x68, 0x6d, 0x61, 0x63, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x48, 0x6d, 0x61, - 0x63, 0x22, 0xa8, 0x01, 0x0a, 0x21, 0x53, 0x73, 0x68, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, - 0x4b, 0x65, 0x79, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x41, 0x74, 0x74, - 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, - 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x5f, 0x6b, - 0x65, 0x79, 0x5f, 0x68, 0x6d, 0x61, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x70, - 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x48, 0x6d, 0x61, 0x63, 0x12, 0x3d, 0x0a, - 0x1b, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x70, 0x61, 0x73, - 0x73, 0x70, 0x68, 0x72, 0x61, 0x73, 0x65, 0x5f, 0x68, 0x6d, 0x61, 0x63, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x18, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x50, 0x61, - 0x73, 0x73, 0x70, 0x68, 0x72, 0x61, 0x73, 0x65, 0x48, 0x6d, 0x61, 0x63, 0x22, 0x3b, 0x0a, 0x18, - 0x4a, 0x73, 0x6f, 0x6e, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x41, 0x74, - 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x62, 0x6a, 0x65, - 0x63, 0x74, 0x5f, 0x68, 
0x6d, 0x61, 0x63, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x48, 0x6d, 0x61, 0x63, 0x22, 0xb9, 0x02, 0x0a, 0x11, 0x43, 0x72, - 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x4c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x79, 0x12, - 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, - 0x6a, 0x0a, 0x10, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x73, 0x74, - 0x6f, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, 0x6f, - 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x10, 0x63, 0x72, 0x65, 0x64, 0x65, - 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, - 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x75, 0x72, 0x70, 0x6f, 0x73, 0x65, 0x73, 0x18, 0x05, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x08, 0x70, 0x75, 0x72, 0x70, 0x6f, 0x73, 0x65, 0x73, 0x12, 0x12, 0x0a, - 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, - 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x48, 0x00, - 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x42, 0x07, 0x0a, 0x05, - 0x61, 0x74, 0x74, 0x72, 0x73, 0x22, 0x83, 0x01, 0x0a, 0x20, 0x56, 0x61, 0x75, 0x6c, 0x74, 0x43, - 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x4c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x79, - 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, - 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x1f, - 0x0a, 0x0b, 0x68, 0x74, 0x74, 0x70, 0x5f, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0a, 0x68, 0x74, 0x74, 0x70, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, - 0x2a, 0x0a, 0x11, 0x68, 0x74, 0x74, 0x70, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x5f, - 0x62, 0x6f, 0x64, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x68, 0x74, 0x74, 0x70, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x42, 0x6f, 0x64, 0x79, 0x22, 0xdb, 0x04, 0x0a, 0x2e, - 0x56, 0x61, 0x75, 0x6c, 0x74, 0x53, 0x53, 0x48, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, - 0x61, 0x74, 0x65, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x4c, 0x69, 0x62, - 0x72, 0x61, 0x72, 0x79, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x12, - 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, - 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x19, - 0x0a, 0x08, 0x6b, 0x65, 0x79, 0x5f, 0x74, 0x79, 
0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6b, 0x65, 0x79, - 0x5f, 0x62, 0x69, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x07, 0x6b, 0x65, 0x79, - 0x42, 0x69, 0x74, 0x73, 0x12, 0x10, 0x0a, 0x03, 0x74, 0x74, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x74, 0x74, 0x6c, 0x12, 0x9d, 0x01, 0x0a, 0x10, 0x63, 0x72, 0x69, 0x74, 0x69, - 0x63, 0x61, 0x6c, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, - 0x31, 0x2e, 0x56, 0x61, 0x75, 0x6c, 0x74, 0x53, 0x53, 0x48, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, - 0x69, 0x63, 0x61, 0x74, 0x65, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x4c, - 0x69, 0x62, 0x72, 0x61, 0x72, 0x79, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, - 0x2e, 0x43, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x63, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x4f, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x8d, 0x01, 0x0a, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x6e, - 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x6d, 0x2e, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, - 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x75, 0x6c, - 0x74, 0x53, 0x53, 0x48, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x43, - 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x4c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x79, - 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, - 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x65, 0x78, 0x74, 0x65, - 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x42, 0x0a, 0x14, 0x43, 0x72, 0x69, 0x74, 0x69, 0x63, - 0x61, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x3d, 0x0a, 0x0f, 0x45, 0x78, - 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, - 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xc2, 0x03, 0x0a, 0x0c, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x41, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x47, 0x0a, 0x04, 0x75, 0x73, - 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, 0x6f, 0x72, - 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x73, 0x65, 
0x72, 0x52, 0x04, 0x75, - 0x73, 0x65, 0x72, 0x12, 0x4d, 0x0a, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, - 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, - 0x65, 0x74, 0x12, 0x47, 0x0a, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x33, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, - 0x2e, 0x48, 0x6f, 0x73, 0x74, 0x52, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x12, 0x5b, 0x0a, 0x0b, 0x63, - 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x39, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, - 0x2e, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x52, 0x0b, 0x63, 0x72, 0x65, - 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x73, 0x12, 0x74, 0x0a, 0x14, 0x63, 0x72, 0x65, 0x64, - 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x6c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x69, 0x65, 0x73, - 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, - 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, - 0x6c, 0x4c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x79, 0x52, 0x14, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x6c, 0x69, 0x62, 0x72, 0x61, 0x72, 0x69, 0x65, 0x73, 0x22, 0xbe, - 0x07, 0x0a, 0x10, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, - 0x69, 0x6e, 0x67, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x02, 0x69, 0x64, 0x12, 0x43, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x63, - 0x6f, 0x70, 0x65, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x49, 0x6e, 0x66, - 0x6f, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x12, 0x2c, 0x0a, 0x11, 0x73, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x5f, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x11, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x62, 0x75, 0x63, - 0x6b, 0x65, 0x74, 0x5f, 0x69, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, - 0x75, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, - 
0x75, 0x70, 0x12, 0x1e, 0x0a, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x64, 0x6f, 0x77, 0x6e, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x64, 0x6f, - 0x77, 0x6e, 0x12, 0x3e, 0x0a, 0x0c, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, - 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0c, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, - 0x6d, 0x65, 0x12, 0x3e, 0x0a, 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, - 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, - 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, - 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x36, - 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x08, 0x65, 0x6e, - 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, - 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, - 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x1e, 0x0a, 0x0a, - 0x6d, 0x69, 0x6d, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x0f, 0x20, 0x03, 0x28, 0x09, - 0x52, 0x0a, 0x6d, 0x69, 0x6d, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, - 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x78, 0x0a, 0x15, 0x63, 0x6f, 0x6e, 0x6e, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, - 0x73, 0x18, 0x11, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x42, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, + 0x22, 0xef, 0x01, 0x0a, 0x0c, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x41, 0x74, 0x54, 0x69, 0x6d, + 0x65, 0x12, 0x47, 0x0a, 0x04, 0x75, 0x73, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x33, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x72, 0x65, 
0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, + 0x55, 0x73, 0x65, 0x72, 0x52, 0x04, 0x75, 0x73, 0x65, 0x72, 0x12, 0x4d, 0x0a, 0x06, 0x74, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, + 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x61, 0x72, 0x67, 0x65, + 0x74, 0x52, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x12, 0x47, 0x0a, 0x04, 0x68, 0x6f, 0x73, + 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, - 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x15, 0x63, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, - 0x67, 0x73, 0x12, 0x6b, 0x0a, 0x12, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, - 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3b, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x41, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x52, 0x12, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, - 0x2e, 0x0a, 0x12, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x13, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x61, 0x75, 0x74, - 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, - 0x66, 0x5a, 0x64, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, - 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, - 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x62, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x73, 0x2f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, - 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x63, - 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x48, 0x6f, 0x73, 0x74, 0x52, 0x04, 0x68, 0x6f, + 0x73, 0x74, 0x22, 0xbe, 0x07, 0x0a, 0x10, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x43, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x73, 0x2e, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x73, 0x2e, 0x76, 
0x31, 0x2e, 0x53, 0x63, 0x6f, 0x70, + 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x12, 0x1e, 0x0a, 0x0a, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0a, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x12, 0x2c, 0x0a, 0x11, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x5f, 0x69, + 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x5f, 0x69, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x62, 0x79, + 0x74, 0x65, 0x73, 0x5f, 0x75, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x62, 0x79, + 0x74, 0x65, 0x73, 0x5f, 0x75, 0x70, 0x12, 0x1e, 0x0a, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, + 0x64, 0x6f, 0x77, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, + 0x73, 0x5f, 0x64, 0x6f, 0x77, 0x6e, 0x12, 0x3e, 0x0a, 0x0c, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0c, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x3e, 0x0a, 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0c, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, + 0x74, 0x69, 0x6d, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, + 0x6d, 0x65, 0x12, 0x36, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x0a, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x52, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x64, 0x75, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, + 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x0d, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x0e, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, + 0x12, 0x1e, 0x0a, 0x0a, 0x6d, 0x69, 0x6d, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x0f, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x69, 0x6d, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, + 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x10, 
0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x78, 0x0a, 0x15, + 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x63, 0x6f, 0x72, + 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x11, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x42, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x52, + 0x15, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x63, 0x6f, + 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x6b, 0x0a, 0x12, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x12, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x2e, + 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x41, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x52, + 0x12, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x12, 0x2e, 0x0a, 0x12, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, + 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x13, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x12, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x42, 0x66, 0x5a, 0x64, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x62, 0x6f, 0x75, 0x6e, + 0x64, 0x61, 0x72, 0x79, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x62, 0x73, 0x2f, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x73, 0x2f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x5f, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, } var ( @@ -2391,78 +1439,58 @@ func file_controller_api_resources_sessionrecordings_v1_session_recording_proto_ return file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDescData } -var file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes = make([]protoimpl.MessageInfo, 21) +var file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes = make([]protoimpl.MessageInfo, 10) var file_controller_api_resources_sessionrecordings_v1_session_recording_proto_goTypes = []interface{}{ - (*ChannelRecording)(nil), // 0: controller.api.resources.sessionrecordings.v1.ChannelRecording - (*ConnectionRecording)(nil), // 1: controller.api.resources.sessionrecordings.v1.ConnectionRecording - (*User)(nil), // 2: controller.api.resources.sessionrecordings.v1.User - (*HostCatalog)(nil), // 3: controller.api.resources.sessionrecordings.v1.HostCatalog - (*Host)(nil), // 4: controller.api.resources.sessionrecordings.v1.Host - (*StaticHostAttributes)(nil), // 5: 
controller.api.resources.sessionrecordings.v1.StaticHostAttributes - (*Target)(nil), // 6: controller.api.resources.sessionrecordings.v1.Target - (*SshTargetAttributes)(nil), // 7: controller.api.resources.sessionrecordings.v1.SshTargetAttributes - (*CredentialStore)(nil), // 8: controller.api.resources.sessionrecordings.v1.CredentialStore - (*VaultCredentialStoreAttributes)(nil), // 9: controller.api.resources.sessionrecordings.v1.VaultCredentialStoreAttributes - (*Credential)(nil), // 10: controller.api.resources.sessionrecordings.v1.Credential - (*UsernamePasswordCredentialAttributes)(nil), // 11: controller.api.resources.sessionrecordings.v1.UsernamePasswordCredentialAttributes - (*SshPrivateKeyCredentialAttributes)(nil), // 12: controller.api.resources.sessionrecordings.v1.SshPrivateKeyCredentialAttributes - (*JsonCredentialAttributes)(nil), // 13: controller.api.resources.sessionrecordings.v1.JsonCredentialAttributes - (*CredentialLibrary)(nil), // 14: controller.api.resources.sessionrecordings.v1.CredentialLibrary - (*VaultCredentialLibraryAttributes)(nil), // 15: controller.api.resources.sessionrecordings.v1.VaultCredentialLibraryAttributes - (*VaultSSHCertificateCredentialLibraryAttributes)(nil), // 16: controller.api.resources.sessionrecordings.v1.VaultSSHCertificateCredentialLibraryAttributes - (*ValuesAtTime)(nil), // 17: controller.api.resources.sessionrecordings.v1.ValuesAtTime - (*SessionRecording)(nil), // 18: controller.api.resources.sessionrecordings.v1.SessionRecording - nil, // 19: controller.api.resources.sessionrecordings.v1.VaultSSHCertificateCredentialLibraryAttributes.CriticalOptionsEntry - nil, // 20: controller.api.resources.sessionrecordings.v1.VaultSSHCertificateCredentialLibraryAttributes.ExtensionsEntry - (*timestamppb.Timestamp)(nil), // 21: google.protobuf.Timestamp - (*durationpb.Duration)(nil), // 22: google.protobuf.Duration - (*scopes.ScopeInfo)(nil), // 23: controller.api.resources.scopes.v1.ScopeInfo - (*structpb.Struct)(nil), // 24: google.protobuf.Struct + (*ChannelRecording)(nil), // 0: controller.api.resources.sessionrecordings.v1.ChannelRecording + (*ConnectionRecording)(nil), // 1: controller.api.resources.sessionrecordings.v1.ConnectionRecording + (*User)(nil), // 2: controller.api.resources.sessionrecordings.v1.User + (*HostCatalog)(nil), // 3: controller.api.resources.sessionrecordings.v1.HostCatalog + (*Host)(nil), // 4: controller.api.resources.sessionrecordings.v1.Host + (*StaticHostAttributes)(nil), // 5: controller.api.resources.sessionrecordings.v1.StaticHostAttributes + (*Target)(nil), // 6: controller.api.resources.sessionrecordings.v1.Target + (*SshTargetAttributes)(nil), // 7: controller.api.resources.sessionrecordings.v1.SshTargetAttributes + (*ValuesAtTime)(nil), // 8: controller.api.resources.sessionrecordings.v1.ValuesAtTime + (*SessionRecording)(nil), // 9: controller.api.resources.sessionrecordings.v1.SessionRecording + (*timestamppb.Timestamp)(nil), // 10: google.protobuf.Timestamp + (*durationpb.Duration)(nil), // 11: google.protobuf.Duration + (*scopes.ScopeInfo)(nil), // 12: controller.api.resources.scopes.v1.ScopeInfo + (*structpb.Struct)(nil), // 13: google.protobuf.Struct } var file_controller_api_resources_sessionrecordings_v1_session_recording_proto_depIdxs = []int32{ - 21, // 0: controller.api.resources.sessionrecordings.v1.ChannelRecording.created_time:type_name -> google.protobuf.Timestamp - 21, // 1: controller.api.resources.sessionrecordings.v1.ChannelRecording.updated_time:type_name -> google.protobuf.Timestamp 
- 21, // 2: controller.api.resources.sessionrecordings.v1.ChannelRecording.start_time:type_name -> google.protobuf.Timestamp - 21, // 3: controller.api.resources.sessionrecordings.v1.ChannelRecording.end_time:type_name -> google.protobuf.Timestamp - 22, // 4: controller.api.resources.sessionrecordings.v1.ChannelRecording.duration:type_name -> google.protobuf.Duration - 21, // 5: controller.api.resources.sessionrecordings.v1.ConnectionRecording.created_time:type_name -> google.protobuf.Timestamp - 21, // 6: controller.api.resources.sessionrecordings.v1.ConnectionRecording.updated_time:type_name -> google.protobuf.Timestamp - 21, // 7: controller.api.resources.sessionrecordings.v1.ConnectionRecording.start_time:type_name -> google.protobuf.Timestamp - 21, // 8: controller.api.resources.sessionrecordings.v1.ConnectionRecording.end_time:type_name -> google.protobuf.Timestamp - 22, // 9: controller.api.resources.sessionrecordings.v1.ConnectionRecording.duration:type_name -> google.protobuf.Duration + 10, // 0: controller.api.resources.sessionrecordings.v1.ChannelRecording.created_time:type_name -> google.protobuf.Timestamp + 10, // 1: controller.api.resources.sessionrecordings.v1.ChannelRecording.updated_time:type_name -> google.protobuf.Timestamp + 10, // 2: controller.api.resources.sessionrecordings.v1.ChannelRecording.start_time:type_name -> google.protobuf.Timestamp + 10, // 3: controller.api.resources.sessionrecordings.v1.ChannelRecording.end_time:type_name -> google.protobuf.Timestamp + 11, // 4: controller.api.resources.sessionrecordings.v1.ChannelRecording.duration:type_name -> google.protobuf.Duration + 10, // 5: controller.api.resources.sessionrecordings.v1.ConnectionRecording.created_time:type_name -> google.protobuf.Timestamp + 10, // 6: controller.api.resources.sessionrecordings.v1.ConnectionRecording.updated_time:type_name -> google.protobuf.Timestamp + 10, // 7: controller.api.resources.sessionrecordings.v1.ConnectionRecording.start_time:type_name -> google.protobuf.Timestamp + 10, // 8: controller.api.resources.sessionrecordings.v1.ConnectionRecording.end_time:type_name -> google.protobuf.Timestamp + 11, // 9: controller.api.resources.sessionrecordings.v1.ConnectionRecording.duration:type_name -> google.protobuf.Duration 0, // 10: controller.api.resources.sessionrecordings.v1.ConnectionRecording.channel_recordings:type_name -> controller.api.resources.sessionrecordings.v1.ChannelRecording - 23, // 11: controller.api.resources.sessionrecordings.v1.User.scope:type_name -> controller.api.resources.scopes.v1.ScopeInfo - 23, // 12: controller.api.resources.sessionrecordings.v1.HostCatalog.scope:type_name -> controller.api.resources.scopes.v1.ScopeInfo - 24, // 13: controller.api.resources.sessionrecordings.v1.HostCatalog.attributes:type_name -> google.protobuf.Struct + 12, // 11: controller.api.resources.sessionrecordings.v1.User.scope:type_name -> controller.api.resources.scopes.v1.ScopeInfo + 12, // 12: controller.api.resources.sessionrecordings.v1.HostCatalog.scope:type_name -> controller.api.resources.scopes.v1.ScopeInfo + 13, // 13: controller.api.resources.sessionrecordings.v1.HostCatalog.attributes:type_name -> google.protobuf.Struct 3, // 14: controller.api.resources.sessionrecordings.v1.Host.host_catalog:type_name -> controller.api.resources.sessionrecordings.v1.HostCatalog - 24, // 15: controller.api.resources.sessionrecordings.v1.Host.attributes:type_name -> google.protobuf.Struct - 23, // 16: controller.api.resources.sessionrecordings.v1.Target.scope:type_name -> 
controller.api.resources.scopes.v1.ScopeInfo - 24, // 17: controller.api.resources.sessionrecordings.v1.Target.attributes:type_name -> google.protobuf.Struct - 24, // 18: controller.api.resources.sessionrecordings.v1.CredentialStore.attributes:type_name -> google.protobuf.Struct - 8, // 19: controller.api.resources.sessionrecordings.v1.Credential.credential_store:type_name -> controller.api.resources.sessionrecordings.v1.CredentialStore - 24, // 20: controller.api.resources.sessionrecordings.v1.Credential.attributes:type_name -> google.protobuf.Struct - 8, // 21: controller.api.resources.sessionrecordings.v1.CredentialLibrary.credential_store:type_name -> controller.api.resources.sessionrecordings.v1.CredentialStore - 24, // 22: controller.api.resources.sessionrecordings.v1.CredentialLibrary.attributes:type_name -> google.protobuf.Struct - 19, // 23: controller.api.resources.sessionrecordings.v1.VaultSSHCertificateCredentialLibraryAttributes.critical_options:type_name -> controller.api.resources.sessionrecordings.v1.VaultSSHCertificateCredentialLibraryAttributes.CriticalOptionsEntry - 20, // 24: controller.api.resources.sessionrecordings.v1.VaultSSHCertificateCredentialLibraryAttributes.extensions:type_name -> controller.api.resources.sessionrecordings.v1.VaultSSHCertificateCredentialLibraryAttributes.ExtensionsEntry - 2, // 25: controller.api.resources.sessionrecordings.v1.ValuesAtTime.user:type_name -> controller.api.resources.sessionrecordings.v1.User - 6, // 26: controller.api.resources.sessionrecordings.v1.ValuesAtTime.target:type_name -> controller.api.resources.sessionrecordings.v1.Target - 4, // 27: controller.api.resources.sessionrecordings.v1.ValuesAtTime.host:type_name -> controller.api.resources.sessionrecordings.v1.Host - 10, // 28: controller.api.resources.sessionrecordings.v1.ValuesAtTime.credentials:type_name -> controller.api.resources.sessionrecordings.v1.Credential - 14, // 29: controller.api.resources.sessionrecordings.v1.ValuesAtTime.credential_libraries:type_name -> controller.api.resources.sessionrecordings.v1.CredentialLibrary - 23, // 30: controller.api.resources.sessionrecordings.v1.SessionRecording.scope:type_name -> controller.api.resources.scopes.v1.ScopeInfo - 21, // 31: controller.api.resources.sessionrecordings.v1.SessionRecording.created_time:type_name -> google.protobuf.Timestamp - 21, // 32: controller.api.resources.sessionrecordings.v1.SessionRecording.updated_time:type_name -> google.protobuf.Timestamp - 21, // 33: controller.api.resources.sessionrecordings.v1.SessionRecording.start_time:type_name -> google.protobuf.Timestamp - 21, // 34: controller.api.resources.sessionrecordings.v1.SessionRecording.end_time:type_name -> google.protobuf.Timestamp - 22, // 35: controller.api.resources.sessionrecordings.v1.SessionRecording.duration:type_name -> google.protobuf.Duration - 1, // 36: controller.api.resources.sessionrecordings.v1.SessionRecording.connection_recordings:type_name -> controller.api.resources.sessionrecordings.v1.ConnectionRecording - 17, // 37: controller.api.resources.sessionrecordings.v1.SessionRecording.create_time_values:type_name -> controller.api.resources.sessionrecordings.v1.ValuesAtTime - 38, // [38:38] is the sub-list for method output_type - 38, // [38:38] is the sub-list for method input_type - 38, // [38:38] is the sub-list for extension type_name - 38, // [38:38] is the sub-list for extension extendee - 0, // [0:38] is the sub-list for field type_name + 13, // 15: 
controller.api.resources.sessionrecordings.v1.Host.attributes:type_name -> google.protobuf.Struct + 12, // 16: controller.api.resources.sessionrecordings.v1.Target.scope:type_name -> controller.api.resources.scopes.v1.ScopeInfo + 13, // 17: controller.api.resources.sessionrecordings.v1.Target.attributes:type_name -> google.protobuf.Struct + 2, // 18: controller.api.resources.sessionrecordings.v1.ValuesAtTime.user:type_name -> controller.api.resources.sessionrecordings.v1.User + 6, // 19: controller.api.resources.sessionrecordings.v1.ValuesAtTime.target:type_name -> controller.api.resources.sessionrecordings.v1.Target + 4, // 20: controller.api.resources.sessionrecordings.v1.ValuesAtTime.host:type_name -> controller.api.resources.sessionrecordings.v1.Host + 12, // 21: controller.api.resources.sessionrecordings.v1.SessionRecording.scope:type_name -> controller.api.resources.scopes.v1.ScopeInfo + 10, // 22: controller.api.resources.sessionrecordings.v1.SessionRecording.created_time:type_name -> google.protobuf.Timestamp + 10, // 23: controller.api.resources.sessionrecordings.v1.SessionRecording.updated_time:type_name -> google.protobuf.Timestamp + 10, // 24: controller.api.resources.sessionrecordings.v1.SessionRecording.start_time:type_name -> google.protobuf.Timestamp + 10, // 25: controller.api.resources.sessionrecordings.v1.SessionRecording.end_time:type_name -> google.protobuf.Timestamp + 11, // 26: controller.api.resources.sessionrecordings.v1.SessionRecording.duration:type_name -> google.protobuf.Duration + 1, // 27: controller.api.resources.sessionrecordings.v1.SessionRecording.connection_recordings:type_name -> controller.api.resources.sessionrecordings.v1.ConnectionRecording + 8, // 28: controller.api.resources.sessionrecordings.v1.SessionRecording.create_time_values:type_name -> controller.api.resources.sessionrecordings.v1.ValuesAtTime + 29, // [29:29] is the sub-list for method output_type + 29, // [29:29] is the sub-list for method input_type + 29, // [29:29] is the sub-list for extension type_name + 29, // [29:29] is the sub-list for extension extendee + 0, // [0:29] is the sub-list for field type_name } func init() { file_controller_api_resources_sessionrecordings_v1_session_recording_proto_init() } @@ -2568,114 +1596,6 @@ func file_controller_api_resources_sessionrecordings_v1_session_recording_proto_ } } file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CredentialStore); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*VaultCredentialStoreAttributes); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Credential); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UsernamePasswordCredentialAttributes); i { - case 0: - return &v.state - case 1: - return 
&v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SshPrivateKeyCredentialAttributes); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*JsonCredentialAttributes); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CredentialLibrary); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*VaultCredentialLibraryAttributes); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*VaultSSHCertificateCredentialLibraryAttributes); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ValuesAtTime); i { case 0: return &v.state @@ -2687,7 +1607,7 @@ func file_controller_api_resources_sessionrecordings_v1_session_recording_proto_ return nil } } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SessionRecording); i { case 0: return &v.state @@ -2709,22 +1629,13 @@ func file_controller_api_resources_sessionrecordings_v1_session_recording_proto_ file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[6].OneofWrappers = []interface{}{ (*Target_Attributes)(nil), } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[8].OneofWrappers = []interface{}{ - (*CredentialStore_Attributes)(nil), - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[10].OneofWrappers = []interface{}{ - (*Credential_Attributes)(nil), - } - file_controller_api_resources_sessionrecordings_v1_session_recording_proto_msgTypes[14].OneofWrappers = []interface{}{ - (*CredentialLibrary_Attributes)(nil), - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_controller_api_resources_sessionrecordings_v1_session_recording_proto_rawDesc, NumEnums: 0, - NumMessages: 21, + NumMessages: 10, NumExtensions: 0, NumServices: 0, }, diff --git a/sdk/pbs/plugin/host_plugin_service_grpc.pb.go b/sdk/pbs/plugin/host_plugin_service_grpc.pb.go index 
335df861505..60d72ff117f 100644 --- a/sdk/pbs/plugin/host_plugin_service_grpc.pb.go +++ b/sdk/pbs/plugin/host_plugin_service_grpc.pb.go @@ -22,20 +22,6 @@ type HostPluginServiceClient interface { // allows those values to be normalized prior to creating or updating those // values in the host catalog data. // - // NormalizeCatalogData is useful for converting the values of attributes from - // a certain format/type to an expected value format/type. This is useful - // during migration of values. - // - // NormalizeCatalogData is called before the values of attributes are persisted. - // All normalized values will be persisted in Boundary and returned - // to all clients. - // - // NormalizeCatalogData could affect other clients. For example, on Terraform, - // if data is passed to Boundary and then normalized into a new data - // structure, it could cause diffs in Terraform for unchanged values. - // This is because, the data structure in Terraform's state will now be - // different from the normalized data structure returned from Boundary. - // // NormalizeCatalogData is called before: // * OnCreateCatalog // * OnUpdateCatalog @@ -53,20 +39,6 @@ type HostPluginServiceClient interface { // allows those values to be normalized prior to creating or updating those // values in the host set data. // - // NormalizeSetData is useful for converting the values of attributes from - // a certain format/type to an expected value format/type. This is useful - // during migration of values. - // - // NormalizeSetData is called before the values of attributes are persisted. - // All normalized values will be persisted in Boundary and returned - // to all clients. - // - // NormalizeSetData could affect other clients. For example, on Terraform, - // if data is passed to Boundary and then normalized into a new data - // structure, it could cause diffs in Terraform for unchanged values. - // This is because, the data structure in Terraform's state will now be - // different from the normalized data structure returned from Boundary. - // // NormalizeSetData is called before: // * OnCreateSet // * OnUpdateSet @@ -178,20 +150,6 @@ type HostPluginServiceServer interface { // allows those values to be normalized prior to creating or updating those // values in the host catalog data. // - // NormalizeCatalogData is useful for converting the values of attributes from - // a certain format/type to an expected value format/type. This is useful - // during migration of values. - // - // NormalizeCatalogData is called before the values of attributes are persisted. - // All normalized values will be persisted in Boundary and returned - // to all clients. - // - // NormalizeCatalogData could affect other clients. For example, on Terraform, - // if data is passed to Boundary and then normalized into a new data - // structure, it could cause diffs in Terraform for unchanged values. - // This is because, the data structure in Terraform's state will now be - // different from the normalized data structure returned from Boundary. - // // NormalizeCatalogData is called before: // * OnCreateCatalog // * OnUpdateCatalog @@ -209,20 +167,6 @@ type HostPluginServiceServer interface { // allows those values to be normalized prior to creating or updating those // values in the host set data. // - // NormalizeSetData is useful for converting the values of attributes from - // a certain format/type to an expected value format/type. This is useful - // during migration of values. 
- // - // NormalizeSetData is called before the values of attributes are persisted. - // All normalized values will be persisted in Boundary and returned - // to all clients. - // - // NormalizeSetData could affect other clients. For example, on Terraform, - // if data is passed to Boundary and then normalized into a new data - // structure, it could cause diffs in Terraform for unchanged values. - // This is because, the data structure in Terraform's state will now be - // different from the normalized data structure returned from Boundary. - // // NormalizeSetData is called before: // * OnCreateSet // * OnUpdateSet diff --git a/testing/dbtest/session_list_benchmarks_dump_generation_test.go b/testing/dbtest/session_list_benchmarks_dump_generation_test.go index ad76dd134d5..e08d1b37666 100644 --- a/testing/dbtest/session_list_benchmarks_dump_generation_test.go +++ b/testing/dbtest/session_list_benchmarks_dump_generation_test.go @@ -125,9 +125,9 @@ func TestGenerateSessionBenchmarkTemplateDumps(t *testing.T) { kms := kms.TestKms(t, conn, wrap) iamRepo := iam.TestRepo(t, conn, wrap) - authTokenRepo, err := authtoken.NewRepository(ctx, rw, rw, kms) + authTokenRepo, err := authtoken.NewRepository(rw, rw, kms) require.NoError(err) - pwRepo, err := password.NewRepository(ctx, rw, rw, kms) + pwRepo, err := password.NewRepository(rw, rw, kms) require.NoError(err) sessRepo, err := session.NewRepository(ctx, rw, rw, kms) require.NoError(err) @@ -255,7 +255,7 @@ func newUser(t testing.TB, ctx context.Context, iamRepo *iam.Repository, authTok require := require.New(t) o, pWithSessions := iam.TestScopes(t, iamRepo) am := password.TestAuthMethod(t, conn, o.GetPublicId()) - acct, err := password.NewAccount(ctx, am.GetPublicId(), password.WithLoginName(name)) + acct, err := password.NewAccount(am.GetPublicId(), password.WithLoginName(name)) require.NoError(err) acct, err = pwRepo.CreateAccount(ctx, o.PublicId, acct, password.WithPassword(dbtest.BoundaryBenchmarksUserPassword)) require.NoError(err) diff --git a/testing/internal/e2e/README.md b/testing/internal/e2e/README.md index c3aee25459b..1992146f153 100644 --- a/testing/internal/e2e/README.md +++ b/testing/internal/e2e/README.md @@ -37,7 +37,7 @@ Set the appropriate environment variables... ```shell export E2E_TESTS=true # This is needed for any e2e test. Otherwise, the test is skipped -# For e2e/tests/base +# For e2e/tests/static export BOUNDARY_ADDR= # e.g. http://127.0.0.1:9200 export E2E_PASSWORD_AUTH_METHOD_ID= # e.g. ampw_1234567890 export E2E_PASSWORD_ADMIN_LOGIN_NAME= # e.g. "admin" @@ -47,7 +47,7 @@ export E2E_TARGET_IP= # e.g. 192.168.0.1 export E2E_SSH_KEY_PATH= # e.g. /Users/username/key.pem export E2E_SSH_USER= # e.g. ubuntu -# For e2e/tests/base_with_vault +# For e2e/tests/static_with_vault export BOUNDARY_ADDR= # e.g. http://127.0.0.1:9200 export E2E_PASSWORD_AUTH_METHOD_ID= # e.g. ampw_1234567890 export E2E_PASSWORD_ADMIN_LOGIN_NAME= # e.g. "admin" @@ -84,10 +84,10 @@ export VAULT_TOKEN= Then, run... 
```shell -go test github.com/hashicorp/boundary/testing/internal/e2e/tests/base +go test github.com/hashicorp/boundary/testing/internal/e2e/tests/static go test ./target/ // run target tests if running from this directory -go test github.com/hashicorp/boundary/testing/internal/e2e/tests/base -v // verbose -go test github.com/hashicorp/boundary/testing/internal/e2e/tests/base -v -run '^TestCreateTargetApi$' // run a specific test +go test github.com/hashicorp/boundary/testing/internal/e2e/tests/static -v // verbose +go test github.com/hashicorp/boundary/testing/internal/e2e/tests/static -v -run '^TestCreateTargetApi$' // run a specific test ``` ## Adding Tests diff --git a/testing/internal/e2e/boundary/boundary.go b/testing/internal/e2e/boundary/boundary.go index d4975de35ba..0625be7e021 100644 --- a/testing/internal/e2e/boundary/boundary.go +++ b/testing/internal/e2e/boundary/boundary.go @@ -8,12 +8,6 @@ import ( "github.com/hashicorp/boundary/api/authmethods" ) -// ConnectCliOutput parses the json response from running `boundary connect` -type ConnectCliOutput struct { - Port int `json:"port"` - Address string `json:"address"` -} - // AuthenticateCliOutput parses the json response from running `boundary authenticate` type AuthenticateCliOutput struct { Item *authmethods.AuthenticateResult diff --git a/testing/internal/e2e/boundary/connect.go b/testing/internal/e2e/boundary/connect.go deleted file mode 100644 index 0ac4bf1294e..00000000000 --- a/testing/internal/e2e/boundary/connect.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package boundary - -import ( - "bufio" - "context" - "encoding/json" - "os/exec" - "testing" - - "github.com/creack/pty" - "github.com/stretchr/testify/require" -) - -// ConnectCli uses the boundary CLI to establish connection to the target. -// It then parses proxy details from the command output and returns them. -// The connection must be closed separately via the `boundary sessions cancel` command -func ConnectCli(t testing.TB, ctx context.Context, targetId string) ConnectCliOutput { - cmd := exec.CommandContext(ctx, - "boundary", "connect", - "-target-id", targetId, - "-format", "json", - ) - fileConnect, err := pty.Start(cmd) - require.NoError(t, err) - t.Cleanup(func() { - err := fileConnect.Close() - require.NoError(t, err) - }) - - scanner := bufio.NewScanner(fileConnect) - var connectCliOutputJson string - if scanner.Scan() { - connectCliOutputJson = scanner.Text() - } - require.NoError(t, scanner.Err()) - - var connectCliOutput ConnectCliOutput - err = json.Unmarshal([]byte(connectCliOutputJson), &connectCliOutput) - require.NoError(t, err) - - return connectCliOutput -} diff --git a/testing/internal/e2e/boundary/scope.go b/testing/internal/e2e/boundary/scope.go index 777cf6a2957..bb81d0da3ca 100644 --- a/testing/internal/e2e/boundary/scope.go +++ b/testing/internal/e2e/boundary/scope.go @@ -64,24 +64,14 @@ func CreateNewOrgCli(t testing.TB, ctx context.Context) string { // CreateNewProjectCli creates a new project in boundary using the cli. The project will be created // under the provided org id. // Returns the id of the new project. -func CreateNewProjectCli(t testing.TB, ctx context.Context, orgId string, opt ...ScopeOption) string { - opts := getScopeOpts(opt...) 
- var args []string - - args = append(args, - "scopes", "create", - "-scope-id", orgId, - "-format", "json", - ) - - if opts.WithName != "" { - args = append(args, "-name", opts.WithName) - } else { - args = append(args, "-name", "e2e Project") - } - +func CreateNewProjectCli(t testing.TB, ctx context.Context, orgId string) string { output := e2e.RunCommand(ctx, "boundary", - e2e.WithArgs(args...), + e2e.WithArgs( + "scopes", "create", + "-name", "e2e Project", + "-scope-id", orgId, + "-format", "json", + ), ) require.NoError(t, output.Err, string(output.Stderr)) @@ -93,29 +83,3 @@ func CreateNewProjectCli(t testing.TB, ctx context.Context, orgId string, opt .. t.Logf("Created Project Id: %s", newProjectId) return newProjectId } - -// getScopeOpts iterates the inbound ScopeOptions and returns a struct -func getScopeOpts(opt ...ScopeOption) scopeOptions { - opts := scopeOptions{ - WithName: "", - } - for _, o := range opt { - o(&opts) - } - return opts -} - -// ScopeOption represents how Options are passed as arguments -type ScopeOption func(*scopeOptions) - -// scopeOptions is a struct representing available options for scopes -type scopeOptions struct { - WithName string -} - -// WithName provides an option to search by a friendly name -func WithName(name string) ScopeOption { - return func(o *scopeOptions) { - o.WithName = name - } -} diff --git a/testing/internal/e2e/boundary/target.go b/testing/internal/e2e/boundary/target.go index 171f7925267..9e666cb065e 100644 --- a/testing/internal/e2e/boundary/target.go +++ b/testing/internal/e2e/boundary/target.go @@ -48,54 +48,53 @@ func AddHostSourceToTargetApi(t testing.TB, ctx context.Context, client *api.Cli // Returns the id of the new target. func CreateNewTargetCli(t testing.TB, ctx context.Context, projectId string, defaultPort string, opt ...target.Option) string { opts := target.GetOpts(opt...) - var args []string - // Set target type. Default to tcp if not specified - if opts.WithType != "" { - args = append(args, string(opts.WithType)) - } else { - args = append(args, "tcp") - } - - args = append(args, + args := []string{ + "targets", "create", "tcp", "-scope-id", projectId, "-default-port", defaultPort, + "-name", "e2e Target", "-format", "json", - ) - - if opts.WithName != "" { - args = append(args, "-name", opts.WithName) - } else { - args = append(args, "-name", "e2e Target") - } - if opts.WithAddress != "" { - args = append(args, "-address", opts.WithAddress) } + if opts.WithDefaultClientPort != 0 { args = append(args, "-default-client-port", fmt.Sprintf("%d", opts.WithDefaultClientPort)) } - if opts.WithEnableSessionRecording != false { - args = append(args, "-enable-session-recording", fmt.Sprintf("%v", opts.WithEnableSessionRecording)) - } - if opts.WithStorageBucketId != "" { - args = append(args, "-storage-bucket-id", opts.WithStorageBucketId) - } - if opts.WithIngressWorkerFilter != "" { - args = append(args, "-ingress-worker-filter", opts.WithIngressWorkerFilter) - } output := e2e.RunCommand(ctx, "boundary", - e2e.WithArgs("targets", "create"), e2e.WithArgs(args...), ) require.NoError(t, output.Err, string(output.Stderr)) - var newTargetResult targets.TargetCreateResult err := json.Unmarshal(output.Stdout, &newTargetResult) require.NoError(t, err) + newTargetId := newTargetResult.Item.Id + t.Logf("Created Target: %s", newTargetId) + + return newTargetId +} +// CreateNewAddressTargetCli uses the cli to create a new target using an +// address in boundary. +// Returns the id of the new target. 
+func CreateNewAddressTargetCli(t testing.TB, ctx context.Context, projectId string, defaultPort string, address string) string { + output := e2e.RunCommand(ctx, "boundary", + e2e.WithArgs( + "targets", "create", "tcp", + "-scope-id", projectId, + "-default-port", defaultPort, + "-name", "e2e Target", + "-address", address, + "-format", "json", + ), + ) + require.NoError(t, output.Err, string(output.Stderr)) + var newTargetResult targets.TargetCreateResult + err := json.Unmarshal(output.Stdout, &newTargetResult) + require.NoError(t, err) newTargetId := newTargetResult.Item.Id t.Logf("Created Target: %s", newTargetId) + return newTargetId } @@ -133,9 +132,9 @@ func RemoveHostSourceFromTargetCli(t testing.TB, ctx context.Context, targetId, require.NoError(t, output.Err, string(output.Stderr)) } -// AddBrokeredCredentialSourceToTargetCli uses the cli to add a credential source (credential library or +// AddCredentialSourceToTargetCli uses the cli to add a credential source (credential library or // credential) to a target -func AddBrokeredCredentialSourceToTargetCli(t testing.TB, ctx context.Context, targetId string, credentialSourceId string) { +func AddCredentialSourceToTargetCli(t testing.TB, ctx context.Context, targetId string, credentialSourceId string) { output := e2e.RunCommand(ctx, "boundary", e2e.WithArgs( "targets", "add-credential-sources", @@ -145,16 +144,3 @@ func AddBrokeredCredentialSourceToTargetCli(t testing.TB, ctx context.Context, t ) require.NoError(t, output.Err, string(output.Stderr)) } - -// RemoveBrokeredCredentialSourceFromTargetCli uses the cli to remove a credential source (credential library or -// credential) from a target -func RemoveBrokeredCredentialSourceFromTargetCli(t testing.TB, ctx context.Context, targetId string, credentialSourceId string) { - output := e2e.RunCommand(ctx, "boundary", - e2e.WithArgs( - "targets", "remove-credential-sources", - "-id", targetId, - "-brokered-credential-source", credentialSourceId, - ), - ) - require.NoError(t, output.Err, string(output.Stderr)) -} diff --git a/testing/internal/e2e/tests/aws/env_test.go b/testing/internal/e2e/tests/aws/env_test.go index af7193befe8..1be58c38a3e 100644 --- a/testing/internal/e2e/tests/aws/env_test.go +++ b/testing/internal/e2e/tests/aws/env_test.go @@ -7,13 +7,11 @@ import "github.com/kelseyhightower/envconfig" type config struct { AwsAccessKeyId string `envconfig:"E2E_AWS_ACCESS_KEY_ID" required:"true"` - AwsBucketName string `envconfig:"E2E_AWS_BUCKET_NAME" required:"true"` AwsSecretAccessKey string `envconfig:"E2E_AWS_SECRET_ACCESS_KEY" required:"true"` AwsHostSetFilter1 string `envconfig:"E2E_AWS_HOST_SET_FILTER" required:"true"` // e.g. "tag:testtag=true" AwsHostSetIps1 string `envconfig:"E2E_AWS_HOST_SET_IPS" required:"true"` // e.g. "[\"1.2.3.4\", \"2.3.4.5\"]" AwsHostSetFilter2 string `envconfig:"E2E_AWS_HOST_SET_FILTER2" required:"true"` // e.g. "tag:testtagtwo=test" AwsHostSetIps2 string `envconfig:"E2E_AWS_HOST_SET_IPS2" required:"true"` // e.g. "[\"1.2.3.4\"]" - AwsRegion string `envconfig:"E2E_AWS_REGION" required:"true"` // e.g. "us-east-1" TargetSshKeyPath string `envconfig:"E2E_SSH_KEY_PATH" required:"true"` // e.g. "/Users/username/key.pem" TargetSshUser string `envconfig:"E2E_SSH_USER" required:"true"` // e.g. "ubuntu" TargetPort string `envconfig:"E2E_SSH_PORT" required:"true"` // e.g. 
"22" diff --git a/testing/internal/e2e/tests/aws/worker_test.go b/testing/internal/e2e/tests/aws/worker_test.go index d2a4353231b..95452956bac 100644 --- a/testing/internal/e2e/tests/aws/worker_test.go +++ b/testing/internal/e2e/tests/aws/worker_test.go @@ -9,7 +9,6 @@ import ( "fmt" "testing" - "github.com/hashicorp/boundary/internal/target" "github.com/hashicorp/boundary/testing/internal/e2e" "github.com/hashicorp/boundary/testing/internal/e2e/boundary" "github.com/stretchr/testify/assert" @@ -31,7 +30,7 @@ func TestCliWorker(t *testing.T) { require.NoError(t, output.Err, string(output.Stderr)) }) newProjectId := boundary.CreateNewProjectCli(t, ctx, newOrgId) - newTargetId := boundary.CreateNewTargetCli(t, ctx, newProjectId, c.TargetPort, target.WithAddress(c.TargetIp)) + newTargetId := boundary.CreateNewAddressTargetCli(t, ctx, newProjectId, c.TargetPort, c.TargetIp) // Set incorrect worker filter, expect connection failure t.Logf("Setting incorrect worker filter...") diff --git a/testing/internal/e2e/tests/database/migration_test.go b/testing/internal/e2e/tests/database/migration_test.go index 8b54720a8ee..f63c8b7a54e 100644 --- a/testing/internal/e2e/tests/database/migration_test.go +++ b/testing/internal/e2e/tests/database/migration_test.go @@ -221,7 +221,7 @@ func populateBoundaryDatabase(t testing.TB, ctx context.Context, c *config, te T boundary.CreateNewStaticCredentialPasswordCli(t, ctx, newCredentialStoreId, c.TargetSshUser, "password") boundary.CreateNewStaticCredentialJsonCli(t, ctx, newCredentialStoreId, "testdata/credential.json") newCredentialsId := boundary.CreateNewStaticCredentialPrivateKeyCli(t, ctx, newCredentialStoreId, c.TargetSshUser, c.TargetSshKeyPath) - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialsId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialsId) // Create vault credentials boundaryPolicyName, kvPolicyFilePath := vault.Setup(t) diff --git a/testing/internal/e2e/tests/base/auth_token_delete_test.go b/testing/internal/e2e/tests/static/auth_token_delete_test.go similarity index 99% rename from testing/internal/e2e/tests/base/auth_token_delete_test.go rename to testing/internal/e2e/tests/static/auth_token_delete_test.go index da27b3f2354..07d42e29722 100644 --- a/testing/internal/e2e/tests/base/auth_token_delete_test.go +++ b/testing/internal/e2e/tests/static/auth_token_delete_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/authenticate_primary_test.go b/testing/internal/e2e/tests/static/authenticate_primary_test.go similarity index 98% rename from testing/internal/e2e/tests/base/authenticate_primary_test.go rename to testing/internal/e2e/tests/static/authenticate_primary_test.go index 3d04b7a8c32..82b92a21ee4 100644 --- a/testing/internal/e2e/tests/base/authenticate_primary_test.go +++ b/testing/internal/e2e/tests/static/authenticate_primary_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. 
// SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "bytes" diff --git a/testing/internal/e2e/tests/base/bytes_up_down_empty_test.go b/testing/internal/e2e/tests/static/bytes_up_down_empty_test.go similarity index 99% rename from testing/internal/e2e/tests/base/bytes_up_down_empty_test.go rename to testing/internal/e2e/tests/static/bytes_up_down_empty_test.go index 13303a6b13d..9c5cb42f746 100644 --- a/testing/internal/e2e/tests/base/bytes_up_down_empty_test.go +++ b/testing/internal/e2e/tests/static/bytes_up_down_empty_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/bytes_up_down_test.go b/testing/internal/e2e/tests/static/bytes_up_down_test.go similarity index 99% rename from testing/internal/e2e/tests/base/bytes_up_down_test.go rename to testing/internal/e2e/tests/static/bytes_up_down_test.go index 1db4649c353..ff133a981f3 100644 --- a/testing/internal/e2e/tests/base/bytes_up_down_test.go +++ b/testing/internal/e2e/tests/static/bytes_up_down_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/target_tcp_connect_authz_token_test.go b/testing/internal/e2e/tests/static/connect_authz_token_test.go similarity index 93% rename from testing/internal/e2e/tests/base/target_tcp_connect_authz_token_test.go rename to testing/internal/e2e/tests/static/connect_authz_token_test.go index 905e06f5f40..90c2224c46c 100644 --- a/testing/internal/e2e/tests/base/target_tcp_connect_authz_token_test.go +++ b/testing/internal/e2e/tests/static/connect_authz_token_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. 
// SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" @@ -18,9 +18,9 @@ import ( "github.com/stretchr/testify/require" ) -// TestCliTcpTargetConnectTargetWithAuthzToken uses the boundary cli to connect to a target using the +// TestCliConnectTargetWithAuthzToken uses the boundary cli to connect to a target using the // `authz_token` option -func TestCliTcpTargetConnectTargetWithAuthzToken(t *testing.T) { +func TestCliConnectTargetWithAuthzToken(t *testing.T) { e2e.MaybeSkipTest(t) c, err := loadTestConfig() require.NoError(t, err) @@ -43,7 +43,7 @@ func TestCliTcpTargetConnectTargetWithAuthzToken(t *testing.T) { boundary.AddHostSourceToTargetCli(t, ctx, newTargetId, newHostSetId) newCredentialStoreId := boundary.CreateNewCredentialStoreStaticCli(t, ctx, newProjectId) newCredentialsId := boundary.CreateNewStaticCredentialPrivateKeyCli(t, ctx, newCredentialStoreId, c.TargetSshUser, c.TargetSshKeyPath) - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialsId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialsId) // Get credentials for target output := e2e.RunCommand(ctx, "boundary", diff --git a/testing/internal/e2e/tests/base/target_tcp_connect_localhost_test.go b/testing/internal/e2e/tests/static/connect_localhost_test.go similarity index 96% rename from testing/internal/e2e/tests/base/target_tcp_connect_localhost_test.go rename to testing/internal/e2e/tests/static/connect_localhost_test.go index b9bff6a9983..bcc4cba87b6 100644 --- a/testing/internal/e2e/tests/base/target_tcp_connect_localhost_test.go +++ b/testing/internal/e2e/tests/static/connect_localhost_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" @@ -16,7 +16,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestCliTcpTargetConnectTargetWithLocalhost(t *testing.T) { +func TestCliConnectTargetWithLocalhost(t *testing.T) { e2e.MaybeSkipTest(t) c, err := loadTestConfig() require.NoError(t, err) diff --git a/testing/internal/e2e/tests/base/target_tcp_connect_ssh_test.go b/testing/internal/e2e/tests/static/connect_ssh_test.go similarity index 91% rename from testing/internal/e2e/tests/base/target_tcp_connect_ssh_test.go rename to testing/internal/e2e/tests/static/connect_ssh_test.go index 59c6ae6ffb1..f09733ec53f 100644 --- a/testing/internal/e2e/tests/base/target_tcp_connect_ssh_test.go +++ b/testing/internal/e2e/tests/static/connect_ssh_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" @@ -16,10 +16,10 @@ import ( "github.com/stretchr/testify/require" ) -// TestCliTcpTargetConnectTargetWithSsh uses the boundary cli to create a credential using boundary's +// TestCliConnectTargetWithSsh uses the boundary cli to create a credential using boundary's // built-in credential store. The test attaches that credential to a target and attempts to connect // to that target using those credentials. 
-func TestCliTcpTargetConnectTargetWithSsh(t *testing.T) { +func TestCliConnectTargetWithSsh(t *testing.T) { e2e.MaybeSkipTest(t) c, err := loadTestConfig() require.NoError(t, err) @@ -42,7 +42,7 @@ func TestCliTcpTargetConnectTargetWithSsh(t *testing.T) { boundary.AddHostSourceToTargetCli(t, ctx, newTargetId, newHostSetId) newCredentialStoreId := boundary.CreateNewCredentialStoreStaticCli(t, ctx, newProjectId) newCredentialsId := boundary.CreateNewStaticCredentialPrivateKeyCli(t, ctx, newCredentialStoreId, c.TargetSshUser, c.TargetSshKeyPath) - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialsId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialsId) // Get credentials for target output := e2e.RunCommand(ctx, "boundary", diff --git a/testing/internal/e2e/tests/base/target_tcp_connect_test.go b/testing/internal/e2e/tests/static/connect_test.go similarity index 51% rename from testing/internal/e2e/tests/base/target_tcp_connect_test.go rename to testing/internal/e2e/tests/static/connect_test.go index 95ec27859be..1084b0b64bc 100644 --- a/testing/internal/e2e/tests/base/target_tcp_connect_test.go +++ b/testing/internal/e2e/tests/static/connect_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" @@ -16,10 +16,10 @@ import ( "github.com/hashicorp/boundary/testing/internal/e2e/boundary" ) -// TestCliTcpTargetConnectTargetBasic uses the boundary cli to create a number of +// TestCliConnectTargetBasic uses the boundary cli to create a number of // supporting objects to connect to a target. It then attempts to connect to // that target and verifies that the connection was successful. -func TestCliTcpTargetConnectTargetBasic(t *testing.T) { +func TestCliConnectTargetBasic(t *testing.T) { e2e.MaybeSkipTest(t) c, err := loadTestConfig() require.NoError(t, err) @@ -65,84 +65,7 @@ func TestCliTcpTargetConnectTargetBasic(t *testing.T) { t.Log("Successfully connected to target") } -// TestCliTcpTargetConnectTargetBasic uses the boundary cli to create a number of -// supporting objects to connect to a target. It then attempts to connect to -// that target via the combination of target name and target scope name, -// and verifies that the connection was successful. 
-func TestCliTcpTargetConnectTargetViaTargetAndScopeNames(t *testing.T) { - e2e.MaybeSkipTest(t) - c, err := loadTestConfig() - require.NoError(t, err) - - ctx := context.Background() - boundary.AuthenticateAdminCli(t, ctx) - newOrgId := boundary.CreateNewOrgCli(t, ctx) - t.Cleanup(func() { - ctx := context.Background() - boundary.AuthenticateAdminCli(t, ctx) - output := e2e.RunCommand(ctx, "boundary", e2e.WithArgs("scopes", "delete", "-id", newOrgId)) - require.NoError(t, output.Err, string(output.Stderr)) - }) - testProjectName := `E2E/Project-With\Name` - testTargetName := `E2E/Test-Target-With\Name` - newProjectId := boundary.CreateNewProjectCli(t, ctx, newOrgId, boundary.WithName(testProjectName)) - newHostCatalogId := boundary.CreateNewHostCatalogCli(t, ctx, newProjectId) - newHostSetId := boundary.CreateNewHostSetCli(t, ctx, newHostCatalogId) - newHostId := boundary.CreateNewHostCli(t, ctx, newHostCatalogId, c.TargetIp) - boundary.AddHostToHostSetCli(t, ctx, newHostSetId, newHostId) - newTargetId := boundary.CreateNewTargetCli(t, ctx, newProjectId, c.TargetPort, target.WithName(testTargetName)) - boundary.AddHostSourceToTargetCli(t, ctx, newTargetId, newHostSetId) - - // Connect to target via target and scope names, and print host's IP address - output := e2e.RunCommand(ctx, "boundary", - e2e.WithArgs( - "connect", - "-target-name", testTargetName, - "-target-scope-name", testProjectName, - "-exec", "/usr/bin/ssh", "--", - "-l", c.TargetSshUser, - "-i", c.TargetSshKeyPath, - "-o", "UserKnownHostsFile=/dev/null", - "-o", "StrictHostKeyChecking=no", - "-o", "IdentitiesOnly=yes", // forces the use of the provided key - "-p", "{{boundary.port}}", // this is provided by boundary - "{{boundary.ip}}", - "hostname", "-i", - ), - ) - require.NoError(t, output.Err, string(output.Stderr)) - - parts := strings.Fields(string(output.Stdout)) - hostIp := parts[len(parts)-1] - require.Equal(t, c.TargetIp, hostIp, "SSH session did not return expected output") - t.Log("Successfully connected to target by its name and scope name") - - // Connect to target via target name and scope ID, and print host's IP address - output = e2e.RunCommand(ctx, "boundary", - e2e.WithArgs( - "connect", - "-target-name", testTargetName, - "-target-scope-id", newProjectId, - "-exec", "/usr/bin/ssh", "--", - "-l", c.TargetSshUser, - "-i", c.TargetSshKeyPath, - "-o", "UserKnownHostsFile=/dev/null", - "-o", "StrictHostKeyChecking=no", - "-o", "IdentitiesOnly=yes", // forces the use of the provided key - "-p", "{{boundary.port}}", // this is provided by boundary - "{{boundary.ip}}", - "hostname", "-i", - ), - ) - require.NoError(t, output.Err, string(output.Stderr)) - - parts = strings.Fields(string(output.Stdout)) - hostIp = parts[len(parts)-1] - require.Equal(t, c.TargetIp, hostIp, "SSH session did not return expected output") - t.Log("Successfully connected to target by its name and scope ID") -} - -func TestCliTcpTargetConnectTargetWithTargetClientPort(t *testing.T) { +func TestCliConnectTargetWithTargetClientPort(t *testing.T) { e2e.MaybeSkipTest(t) c, err := loadTestConfig() require.NoError(t, err) diff --git a/testing/internal/e2e/tests/base/credential_store_test.go b/testing/internal/e2e/tests/static/credential_store_test.go similarity index 96% rename from testing/internal/e2e/tests/base/credential_store_test.go rename to testing/internal/e2e/tests/static/credential_store_test.go index b6d3e8713c2..7e4a80e8b11 100644 --- a/testing/internal/e2e/tests/base/credential_store_test.go +++ 
b/testing/internal/e2e/tests/static/credential_store_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" @@ -79,9 +79,9 @@ func TestCliStaticCredentialStore(t *testing.T) { require.True(t, newSessionAuthorizationResult.Item.Credentials == nil) // Add credentials to target - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, privateKeyCredentialsId) - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, jsonCredentialsId) - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, pwCredentialsId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, privateKeyCredentialsId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, jsonCredentialsId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, pwCredentialsId) // Get credentials for target output = e2e.RunCommand(ctx, "boundary", diff --git a/testing/internal/e2e/tests/base/env_test.go b/testing/internal/e2e/tests/static/env_test.go similarity index 96% rename from testing/internal/e2e/tests/base/env_test.go rename to testing/internal/e2e/tests/static/env_test.go index f3174ba3310..49964079d91 100644 --- a/testing/internal/e2e/tests/base/env_test.go +++ b/testing/internal/e2e/tests/static/env_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import "github.com/kelseyhightower/envconfig" diff --git a/testing/internal/e2e/tests/base/key_destruction_test.go b/testing/internal/e2e/tests/static/key_destruction_test.go similarity index 99% rename from testing/internal/e2e/tests/base/key_destruction_test.go rename to testing/internal/e2e/tests/static/key_destruction_test.go index ad863c592d4..804726dbe0f 100644 --- a/testing/internal/e2e/tests/base/key_destruction_test.go +++ b/testing/internal/e2e/tests/static/key_destruction_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/session_cancel_admin_test.go b/testing/internal/e2e/tests/static/session_cancel_admin_test.go similarity index 99% rename from testing/internal/e2e/tests/base/session_cancel_admin_test.go rename to testing/internal/e2e/tests/static/session_cancel_admin_test.go index f9938076199..39967bf0d2f 100644 --- a/testing/internal/e2e/tests/base/session_cancel_admin_test.go +++ b/testing/internal/e2e/tests/static/session_cancel_admin_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/session_cancel_group_test.go b/testing/internal/e2e/tests/static/session_cancel_group_test.go similarity index 99% rename from testing/internal/e2e/tests/base/session_cancel_group_test.go rename to testing/internal/e2e/tests/static/session_cancel_group_test.go index 1a1ec1a184a..3566f12ae58 100644 --- a/testing/internal/e2e/tests/base/session_cancel_group_test.go +++ b/testing/internal/e2e/tests/static/session_cancel_group_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. 
// SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/session_cancel_user_test.go b/testing/internal/e2e/tests/static/session_cancel_user_test.go similarity index 99% rename from testing/internal/e2e/tests/base/session_cancel_user_test.go rename to testing/internal/e2e/tests/static/session_cancel_user_test.go index a60f2bd7463..37ddc3a9ba1 100644 --- a/testing/internal/e2e/tests/base/session_cancel_user_test.go +++ b/testing/internal/e2e/tests/static/session_cancel_user_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/session_end_delete_host_set_test.go b/testing/internal/e2e/tests/static/session_end_delete_host_set_test.go similarity index 99% rename from testing/internal/e2e/tests/base/session_end_delete_host_set_test.go rename to testing/internal/e2e/tests/static/session_end_delete_host_set_test.go index 9095287f62c..3b625f2071f 100644 --- a/testing/internal/e2e/tests/base/session_end_delete_host_set_test.go +++ b/testing/internal/e2e/tests/static/session_end_delete_host_set_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/session_end_delete_host_test.go b/testing/internal/e2e/tests/static/session_end_delete_host_test.go similarity index 99% rename from testing/internal/e2e/tests/base/session_end_delete_host_test.go rename to testing/internal/e2e/tests/static/session_end_delete_host_test.go index 46c87a6d6c7..15bf4f1a869 100644 --- a/testing/internal/e2e/tests/base/session_end_delete_host_test.go +++ b/testing/internal/e2e/tests/static/session_end_delete_host_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/session_end_delete_project_test.go b/testing/internal/e2e/tests/static/session_end_delete_project_test.go similarity index 95% rename from testing/internal/e2e/tests/base/session_end_delete_project_test.go rename to testing/internal/e2e/tests/static/session_end_delete_project_test.go index 7c38695e656..d2f88b64c69 100644 --- a/testing/internal/e2e/tests/base/session_end_delete_project_test.go +++ b/testing/internal/e2e/tests/static/session_end_delete_project_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. 
// SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" @@ -9,7 +9,6 @@ import ( "time" "github.com/hashicorp/boundary/internal/session" - "github.com/hashicorp/boundary/internal/target" "github.com/hashicorp/boundary/testing/internal/e2e" "github.com/hashicorp/boundary/testing/internal/e2e/boundary" "github.com/stretchr/testify/assert" @@ -35,7 +34,7 @@ func TestCliSessionEndWhenProjectIsDeleted(t *testing.T) { require.NoError(t, output.Err, string(output.Stderr)) }) newProjectId := boundary.CreateNewProjectCli(t, ctx, newOrgId) - newTargetId := boundary.CreateNewTargetCli(t, ctx, newProjectId, c.TargetPort, target.WithAddress(c.TargetIp)) + newTargetId := boundary.CreateNewAddressTargetCli(t, ctx, newProjectId, c.TargetPort, c.TargetIp) acctName := "e2e-account" newAccountId, acctPassword := boundary.CreateNewAccountCli(t, ctx, bc.AuthMethodId, acctName) t.Cleanup(func() { diff --git a/testing/internal/e2e/tests/base/session_end_delete_target_test.go b/testing/internal/e2e/tests/static/session_end_delete_target_test.go similarity index 99% rename from testing/internal/e2e/tests/base/session_end_delete_target_test.go rename to testing/internal/e2e/tests/static/session_end_delete_target_test.go index 3e2dc653057..d6f23850464 100644 --- a/testing/internal/e2e/tests/base/session_end_delete_target_test.go +++ b/testing/internal/e2e/tests/static/session_end_delete_target_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/session_end_delete_user_test.go b/testing/internal/e2e/tests/static/session_end_delete_user_test.go similarity index 99% rename from testing/internal/e2e/tests/base/session_end_delete_user_test.go rename to testing/internal/e2e/tests/static/session_end_delete_user_test.go index 19e470a49d5..612eab529fc 100644 --- a/testing/internal/e2e/tests/base/session_end_delete_user_test.go +++ b/testing/internal/e2e/tests/static/session_end_delete_user_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base/target_tcp_address_test.go b/testing/internal/e2e/tests/static/target_address_test.go similarity index 97% rename from testing/internal/e2e/tests/base/target_tcp_address_test.go rename to testing/internal/e2e/tests/static/target_address_test.go index fdb20e381ad..ab2634eac2b 100644 --- a/testing/internal/e2e/tests/base/target_tcp_address_test.go +++ b/testing/internal/e2e/tests/static/target_address_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. 
// SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" @@ -10,7 +10,6 @@ import ( "strings" "testing" - "github.com/hashicorp/boundary/internal/target" "github.com/hashicorp/boundary/testing/internal/e2e" "github.com/hashicorp/boundary/testing/internal/e2e/boundary" "github.com/stretchr/testify/require" @@ -33,7 +32,7 @@ func TestCliCreateUpdateTargetAddress(t *testing.T) { require.NoError(t, output.Err, string(output.Stderr)) }) newProjectId := boundary.CreateNewProjectCli(t, ctx, newOrgId) - newTargetId := boundary.CreateNewTargetCli(t, ctx, newProjectId, c.TargetPort, target.WithAddress(c.TargetIp)) + newTargetId := boundary.CreateNewAddressTargetCli(t, ctx, newProjectId, c.TargetPort, c.TargetIp) // Connect to target and print host's IP address output := e2e.RunCommand(ctx, "boundary", @@ -152,7 +151,7 @@ func TestCliTargetAddressToHostSource(t *testing.T) { newHostSetId := boundary.CreateNewHostSetCli(t, ctx, newHostCatalogId) newHostId := boundary.CreateNewHostCli(t, ctx, newHostCatalogId, c.TargetIp) boundary.AddHostToHostSetCli(t, ctx, newHostSetId, newHostId) - newTargetId := boundary.CreateNewTargetCli(t, ctx, newProjectId, c.TargetPort, target.WithAddress(c.TargetIp)) + newTargetId := boundary.CreateNewAddressTargetCli(t, ctx, newProjectId, c.TargetPort, c.TargetIp) // Connect to target and print host's IP address output := e2e.RunCommand(ctx, "boundary", diff --git a/testing/internal/e2e/tests/base/testdata/credential.json b/testing/internal/e2e/tests/static/testdata/credential.json similarity index 100% rename from testing/internal/e2e/tests/base/testdata/credential.json rename to testing/internal/e2e/tests/static/testdata/credential.json diff --git a/testing/internal/e2e/tests/base/version_test.go b/testing/internal/e2e/tests/static/version_test.go similarity index 98% rename from testing/internal/e2e/tests/base/version_test.go rename to testing/internal/e2e/tests/static/version_test.go index 85678120510..36f10aebd6e 100644 --- a/testing/internal/e2e/tests/base/version_test.go +++ b/testing/internal/e2e/tests/static/version_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_test +package static_test import ( "context" diff --git a/testing/internal/e2e/tests/base_with_vault/target_tcp_connect_authz_token_test.go b/testing/internal/e2e/tests/static_with_vault/connect_authz_token_test.go similarity index 91% rename from testing/internal/e2e/tests/base_with_vault/target_tcp_connect_authz_token_test.go rename to testing/internal/e2e/tests/static_with_vault/connect_authz_token_test.go index e0f8687ac1f..0b8e2a4b030 100644 --- a/testing/internal/e2e/tests/base_with_vault/target_tcp_connect_authz_token_test.go +++ b/testing/internal/e2e/tests/static_with_vault/connect_authz_token_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_with_vault_test +package static_with_vault_test import ( "context" @@ -19,11 +19,11 @@ import ( "github.com/stretchr/testify/require" ) -// TestCliTcpTargetVaultConnectTargetWithAuthzToken uses the boundary and vault clis to add secrets +// TestCliVaultConnectTargetWithAuthzToken uses the boundary and vault clis to add secrets // management for a target. The test sets up vault as a credential store, creates a set of // credentials in vault to be attached to a target, and attempts to connect to that target (with the // authz-token option) using those credentials. 
-func TestCliTcpTargetVaultConnectTargetWithAuthzToken(t *testing.T) { +func TestCliVaultConnectTargetWithAuthzToken(t *testing.T) { e2e.MaybeSkipTest(t) c, err := loadTestConfig() require.NoError(t, err) @@ -55,7 +55,7 @@ func TestCliTcpTargetVaultConnectTargetWithAuthzToken(t *testing.T) { }) output := e2e.RunCommand(ctx, "vault", - e2e.WithArgs("secrets", "enable", fmt.Sprintf("-path=%s", c.VaultSecretPath), "kv-v2"), + e2e.WithArgs("secrets", "enable", "-path="+c.VaultSecretPath, "kv-v2"), ) require.NoError(t, output.Err, string(output.Stderr)) t.Cleanup(func() { @@ -81,8 +81,8 @@ func TestCliTcpTargetVaultConnectTargetWithAuthzToken(t *testing.T) { e2e.WithArgs( "token", "create", "-no-default-policy=true", - fmt.Sprintf("-policy=%s", boundaryPolicyName), - fmt.Sprintf("-policy=%s", kvPolicyName), + "-policy="+boundaryPolicyName, + "-policy="+kvPolicyName, "-orphan=true", "-period=20m", "-renewable=true", @@ -104,7 +104,7 @@ func TestCliTcpTargetVaultConnectTargetWithAuthzToken(t *testing.T) { e2e.WithArgs( "credential-libraries", "create", "vault", "-credential-store-id", newCredentialStoreId, - "-vault-path", fmt.Sprintf("%s/data/%s", c.VaultSecretPath, privateKeySecretName), + "-vault-path", c.VaultSecretPath+"/data/"+privateKeySecretName, "-name", "e2e Automated Test Vault Credential Library", "-credential-type", "ssh_private_key", "-format", "json", @@ -117,7 +117,7 @@ func TestCliTcpTargetVaultConnectTargetWithAuthzToken(t *testing.T) { newCredentialLibraryId := newCredentialLibraryResult.Item.Id t.Logf("Created Credential Library: %s", newCredentialLibraryId) - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialLibraryId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialLibraryId) // Get credentials for target output = e2e.RunCommand(ctx, "boundary", diff --git a/testing/internal/e2e/tests/base_with_vault/target_tcp_connect_ssh_test.go b/testing/internal/e2e/tests/static_with_vault/connect_ssh_test.go similarity index 90% rename from testing/internal/e2e/tests/base_with_vault/target_tcp_connect_ssh_test.go rename to testing/internal/e2e/tests/static_with_vault/connect_ssh_test.go index 0892b500e87..eeaa933c86b 100644 --- a/testing/internal/e2e/tests/base_with_vault/target_tcp_connect_ssh_test.go +++ b/testing/internal/e2e/tests/static_with_vault/connect_ssh_test.go @@ -1,12 +1,11 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_with_vault_test +package static_with_vault_test import ( "context" "encoding/json" - "fmt" "os" "testing" @@ -19,11 +18,11 @@ import ( "github.com/stretchr/testify/require" ) -// TestCliTcpTargetVaultConnectTargetWithSsh uses the boundary and vault clis to add secrets management for a +// TestCliVaultConnectTargetWithSsh uses the boundary and vault clis to add secrets management for a // target. The test sets up vault as a credential store, creates a set of credentials in vault to be // attached to a target, and attempts to connect to that target (with the ssh option) using those // credentials. 
-func TestCliTcpTargetVaultConnectTargetWithSsh(t *testing.T) { +func TestCliVaultConnectTargetWithSsh(t *testing.T) { e2e.MaybeSkipTest(t) c, err := loadTestConfig() require.NoError(t, err) @@ -55,7 +54,7 @@ func TestCliTcpTargetVaultConnectTargetWithSsh(t *testing.T) { }) output := e2e.RunCommand(ctx, "vault", - e2e.WithArgs("secrets", "enable", fmt.Sprintf("-path=%s", c.VaultSecretPath), "kv-v2"), + e2e.WithArgs("secrets", "enable", "-path="+c.VaultSecretPath, "kv-v2"), ) require.NoError(t, output.Err, string(output.Stderr)) t.Cleanup(func() { @@ -81,8 +80,8 @@ func TestCliTcpTargetVaultConnectTargetWithSsh(t *testing.T) { e2e.WithArgs( "token", "create", "-no-default-policy=true", - fmt.Sprintf("-policy=%s", boundaryPolicyName), - fmt.Sprintf("-policy=%s", kvPolicyName), + "-policy="+boundaryPolicyName, + "-policy="+kvPolicyName, "-orphan=true", "-period=20m", "-renewable=true", @@ -104,7 +103,7 @@ func TestCliTcpTargetVaultConnectTargetWithSsh(t *testing.T) { e2e.WithArgs( "credential-libraries", "create", "vault", "-credential-store-id", newCredentialStoreId, - "-vault-path", fmt.Sprintf("%s/data/%s", c.VaultSecretPath, privateKeySecretName), + "-vault-path", c.VaultSecretPath+"/data/"+privateKeySecretName, "-name", "e2e Automated Test Vault Credential Library", "-credential-type", "ssh_private_key", "-format", "json", @@ -118,7 +117,7 @@ func TestCliTcpTargetVaultConnectTargetWithSsh(t *testing.T) { t.Logf("Created Credential Library: %s", newCredentialLibraryId) // Add brokered credentials to target - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialLibraryId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialLibraryId) // Get credentials for target output = e2e.RunCommand(ctx, "boundary", diff --git a/testing/internal/e2e/tests/base_with_vault/target_tcp_connect_test.go b/testing/internal/e2e/tests/static_with_vault/connect_test.go similarity index 91% rename from testing/internal/e2e/tests/base_with_vault/target_tcp_connect_test.go rename to testing/internal/e2e/tests/static_with_vault/connect_test.go index f33c56dc312..3a9022d6eca 100644 --- a/testing/internal/e2e/tests/base_with_vault/target_tcp_connect_test.go +++ b/testing/internal/e2e/tests/static_with_vault/connect_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_with_vault_test +package static_with_vault_test import ( "context" @@ -20,11 +20,11 @@ import ( "github.com/stretchr/testify/require" ) -// TestCliTcpTargetVaultConnectTarget uses the boundary and vault clis to add secrets management +// TestCliVaultConnectTarget uses the boundary and vault clis to add secrets management // for a target. The test sets up vault as a credential store, creates a set of credentials // in vault to be attached to a target, and attempts to connect to that target using those // credentials. 
-func TestCliTcpTargetVaultConnectTarget(t *testing.T) { +func TestCliVaultConnectTarget(t *testing.T) { e2e.MaybeSkipTest(t) c, err := loadTestConfig() require.NoError(t, err) @@ -56,7 +56,7 @@ func TestCliTcpTargetVaultConnectTarget(t *testing.T) { }) output := e2e.RunCommand(ctx, "vault", - e2e.WithArgs("secrets", "enable", fmt.Sprintf("-path=%s", c.VaultSecretPath), "kv-v2"), + e2e.WithArgs("secrets", "enable", "-path="+c.VaultSecretPath, "kv-v2"), ) require.NoError(t, output.Err, string(output.Stderr)) t.Cleanup(func() { @@ -82,8 +82,8 @@ func TestCliTcpTargetVaultConnectTarget(t *testing.T) { e2e.WithArgs( "token", "create", "-no-default-policy=true", - fmt.Sprintf("-policy=%s", boundaryPolicyName), - fmt.Sprintf("-policy=%s", kvPolicyName), + "-policy="+boundaryPolicyName, + "-policy="+kvPolicyName, "-orphan=true", "-period=20m", "-renewable=true", @@ -105,7 +105,7 @@ func TestCliTcpTargetVaultConnectTarget(t *testing.T) { e2e.WithArgs( "credential-libraries", "create", "vault", "-credential-store-id", newCredentialStoreId, - "-vault-path", fmt.Sprintf("%s/data/%s", c.VaultSecretPath, privateKeySecretName), + "-vault-path", c.VaultSecretPath+"/data/"+privateKeySecretName, "-name", "e2e Automated Test Vault Credential Library", "-credential-type", "ssh_private_key", "-format", "json", @@ -119,7 +119,7 @@ func TestCliTcpTargetVaultConnectTarget(t *testing.T) { t.Logf("Created Credential Library: %s", newCredentialLibraryId) // Add brokered credentials to target - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialLibraryId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, newCredentialLibraryId) // Get credentials for target output = e2e.RunCommand(ctx, "boundary", diff --git a/testing/internal/e2e/tests/base_with_vault/credential_store_test.go b/testing/internal/e2e/tests/static_with_vault/credential_store_test.go similarity index 98% rename from testing/internal/e2e/tests/base_with_vault/credential_store_test.go rename to testing/internal/e2e/tests/static_with_vault/credential_store_test.go index ec8d7786256..afc3fd85f55 100644 --- a/testing/internal/e2e/tests/base_with_vault/credential_store_test.go +++ b/testing/internal/e2e/tests/static_with_vault/credential_store_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. 
// SPDX-License-Identifier: MPL-2.0 -package base_with_vault_test +package static_with_vault_test import ( "context" @@ -145,8 +145,8 @@ func TestCliVaultCredentialStore(t *testing.T) { require.True(t, newSessionAuthorizationResult.Item.Credentials == nil) // Add credentials to target - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, newPrivateKeyCredentialLibraryId) - boundary.AddBrokeredCredentialSourceToTargetCli(t, ctx, newTargetId, newPasswordCredentialLibraryId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, newPrivateKeyCredentialLibraryId) + boundary.AddCredentialSourceToTargetCli(t, ctx, newTargetId, newPasswordCredentialLibraryId) // Get credentials for target output = e2e.RunCommand(ctx, "boundary", diff --git a/testing/internal/e2e/tests/base_with_vault/env_test.go b/testing/internal/e2e/tests/static_with_vault/env_test.go similarity index 96% rename from testing/internal/e2e/tests/base_with_vault/env_test.go rename to testing/internal/e2e/tests/static_with_vault/env_test.go index e1f9370fed2..e8bef4497d6 100644 --- a/testing/internal/e2e/tests/base_with_vault/env_test.go +++ b/testing/internal/e2e/tests/static_with_vault/env_test.go @@ -1,7 +1,7 @@ // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 -package base_with_vault_test +package static_with_vault_test import "github.com/kelseyhightower/envconfig" diff --git a/testing/internal/e2e/tests/base_with_vault/testdata/boundary-controller-policy.hcl b/testing/internal/e2e/tests/static_with_vault/testdata/boundary-controller-policy.hcl similarity index 100% rename from testing/internal/e2e/tests/base_with_vault/testdata/boundary-controller-policy.hcl rename to testing/internal/e2e/tests/static_with_vault/testdata/boundary-controller-policy.hcl diff --git a/version/VERSION b/version/VERSION index c317a91891f..54d1a4f2a4a 100644 --- a/version/VERSION +++ b/version/VERSION @@ -1 +1 @@ -0.13.1 +0.13.0 diff --git a/version/feature_manager.go b/version/feature_manager.go index e6bce7803bc..397c87d25b1 100644 --- a/version/feature_manager.go +++ b/version/feature_manager.go @@ -21,7 +21,6 @@ const ( UseTargetIdForHostId RequireVersionInWorkerInfo SshSessionRecording - SupportIdInGrants ) var featureMap map[Feature]MetadataConstraint @@ -73,12 +72,6 @@ func init() { featureMap[SshSessionRecording] = MetadataConstraint{ Constraints: mustNewConstraints(">= 0.13.0"), } - - // Warn until 0.15 about using the now-deprecated id field in grants; after - // that disallow it - featureMap[SupportIdInGrants] = MetadataConstraint{ - Constraints: mustNewConstraints("< 0.15.0"), - } } func mustNewConstraints(v string) gvers.Constraints { diff --git a/version/version_base.go b/version/version_base.go index 555995b4b71..c594610ccf6 100644 --- a/version/version_base.go +++ b/version/version_base.go @@ -13,7 +13,7 @@ var ( // Version is the base version // Default values - set when building locally (at build time) - Version = "0.13.1" + Version = "0.13.0" // VersionPrerelease is also set at compile time, similarly to Version. VersionPrerelease string diff --git a/website/content/docs/concepts/auditing.mdx b/website/content/docs/concepts/auditing.mdx index a4034cf14b9..1a434ef90ab 100644 --- a/website/content/docs/concepts/auditing.mdx +++ b/website/content/docs/concepts/auditing.mdx @@ -45,6 +45,31 @@ A controller retrieves the contents of a recording from a worker with access to The controller decodes the contents of the recording into a format that is usable by the player. 
The player then retrieves the data from the controller. +### Session recording audit events + +Boundary emits audit events for actions performed by users. The following are some of the event fields that can be emitted: + +- `timestamp` - The timestamp of the event. +- `auth` - Authentication information about the user who performed the action. +- `response.details.item.connection_recordings.` + - `id` - The session recording ID. + - `channel_recordings.id` - The session recording channel ID. + - `start_time.seconds` - The start time of the session recording. + - `end_time.seconds` - The end time of the session recording. + - `bytes_up` - The number of bytes uploaded during the session. + - `bytes_down` - The number of bytes downloaded during the session. + - `channel_recordings.duration.seconds` - The length of time the session took, recorded in seconds. +- `request_info.client_ip` - The client IP address used by the user. +- `response.details.item.create_time_values.` + - `target.name` - The name of the target. + - `target.id` - The ID of the target accessed during the recording. + - `target.type` - The type of protocol used. + - `target.scope.name` - The name of the scope that the accessed target belongs to. + - `target.scope.parent_scope_id` - The parent scope ID of the scope that the accessed target belongs to. +- `response.details.item.Attrs.SshTargetAttributes.` + - `storage_bucket_id.value` - The ID of the storage bucket attached to the target and used for storing session recordings. + - `enable_session_recording.value` - Indicates whether session recording is enabled on the target. + ## Storage buckets A resource known as a [storage bucket](/boundary/docs/concepts/domain-model/storage-buckets) is used to store the recorded sessions. @@ -75,12 +100,254 @@ Boundary creates the top-level directory of the BSR as `.bsr`. This t A BSR connections directory contains a summary of connections, as well as inbound and outbound requests. If you use a multiplexed protocol, there are subdirectories for the channels.
+``` +└── sr_iNCdGSREeX.bsr + ├── SHA256SUM + ├── SHA256SUM.sig + ├── bsrKey.pub + ├── cr_3bB78W53Y9.connection + │ ├── SHA256SUM + │ ├── SHA256SUM.sig + │ ├── chr_VUnVuVnITu.channel + │ │ ├── SHA256SUM + │ │ ├── SHA256SUM.sig + │ │ ├── channel-recording-summary.json + │ │ ├── channel-recording.meta + │ │ ├── messages-inbound.data + │ │ ├── messages-outbound.data + │ │ ├── requests-inbound.data + │ │ └── requests-outbound.data + │ ├── chr_nITuVUnVuV.channel + │ │ ├── SHA256SUM + │ │ ├── SHA256SUM.sig + │ │ ├── channel-recording-summary.json + │ │ ├── channel-recording.meta + │ │ ├── messages-inbound.data + │ │ ├── messages-outbound.data + │ │ ├── requests-inbound.data + │ │ └── requests-outbound.data + │ ├── connection-recording-summary.json + │ ├── connection-recording.meta + │ ├── requests-inbound.data + │ └── requests-outbound.data + ├── cr_W53Y93bB78.connection + │ ├── SHA256SUM + │ ├── SHA256SUM.sig + │ ├── chr_uVVuUITnVn.channel + │ │ ├── SHA256SUM + │ │ ├── SHA256SUM.sig + │ │ ├── channel-recording-summary.json + │ │ ├── channel-recording.meta + │ │ ├── messages-inbound.data + │ │ ├── messages-outbound.data + │ │ ├── requests-inbound.data + │ │ └── requests-outbound.data + │ ├── connection-recording-summary.json + │ ├── connection-recording.meta + │ ├── requests-inbound.data + │ └── requests-outbound.data + ├── pubKeyBsrSignature.sign + ├── pubKeySelfSignature.sign + ├── session-meta.json + ├── session-recording-summary.json + ├── session-recording.meta + ├── wrappedBsrKey + └── wrappedPrivKey +``` + +### BSR Session folder +``` +└── sr_iNCdGSREeX.bsr + ├── SHA256SUM + ├── SHA256SUM.sig + ├── bsrKey.pub + ├── cr_3bB78W53Y9.connection + ├── pubKeyBsrSignature.sign + ├── pubKeySelfSignature.sign + ├── session-meta.json + ├── session-recording-summary.json + ├── session-recording.meta + ├── wrappedBsrKey + └── wrappedPrivKey +``` + +`session-recording.meta` file example: + +``` +id: sr_iNCdGSREeX +protocol: BSSH +connection: cr_3bB78W53Y9.connection +``` + +`session-meta.json` file example: + +``` +{ + "PublicId": "s_HQbVb8fJaM", + "Endpoint": "ssh://openssh-server:2222", + "User": { + "PublicId": "u_5Ry4oDiEVp", + "Scope": { + "PublicId": "global", + "Name": "global", + "Description": "Global Scope", + "Type": "global", + "ParentId": "", + "PrimaryAuthMethodId": "ampw_CdIa5KR9iw" + }, + "Name": "admin", + "Description": "Initial admin user within the \"global\" scope" + }, + "Target": { + "PublicId": "tssh_TIx4ENZMdA", + "Scope": { + "PublicId": "p_7Qe46uNMYX", + "Name": "session-recording-project", + "Description": "", + "Type": "project", + "ParentId": "o_yK7GoA6OG2", + "PrimaryAuthMethodId": "" + }, + "Name": "session-recording-target", + "Description": "", + "DefaultPort": 2222, + "DefaultClientPort": 0, + "SessionMaxSeconds": 28800, + "SessionConnectionLimit": -1, + "WorkerFilter": "", + "EgressWorkerFilter": "", + "IngressWorkerFilter": "\"pki\" in \"/tags/type\"", + "EnableSessionRecording": true, + "StorageBucketId": "sb_vqn871JdQf" + }, + "Worker": { + "PublicId": "w_ogOQt0rsuQ", + "Version": "0.13.0", + "Sha": "" + }, + "StaticHost": null, + "DynamicHost": null, + "StaticJSONCredentials": null, + "StaticUsernamePasswordCredentials": [ + { + "PublicId": "credup_gdzeB5UWJv", + "Name": "", + "Description": "", + "Username": "username", + "PasswordHmac": "PasswordHmac, + "Purposes": [ + "injected_application" + ], + "CredentialStore": { + "PublicId": "csst_agwIT97uZ7", + "ProjectId": "p_7Qe46uNMYX", + "Name": "ssh static store", + "Description": "SSH Static Cred store" + } + } + 
], + "StaticSshPrivateKeyCredentials": null, + "VaultGenericLibraries": null, + "VaultSshCertificateLibraries": null +} +``` + +`session-recording.json` file example: + +``` +id: sr_iNCdGSREeX +protocol: BSSH +connection: cr_3bB78W53Y9.connection +``` + +`SHA256SUM` and `SHA256SUM.sig` files are used for cryptographically verifying the contents of this directory. +For more information on `*.sign`, `bsrKey.pub`, `wrappedBsrKey`, and `wrappedPrivKey` files, refer to [Validating the integrity of session recordings](/boundary/docs/operations/manage-recorded-sessions). + +### BSR Connection folder + +``` +└── cr_W53Y93bB78.connection + ├── SHA256SUM + ├── SHA256SUM.sig + ├── chr_uVVuUITnVn.channel + ├── connection-recording-summary.json + ├── connection-recording.meta + ├── requests-inbound.data + └── requests-outbound.data +``` + +`connection-recording.meta` file example: + +``` +id: cr_W53Y93bB78 +requests: outbound +requests: inbound +channel: chr_uVVuUITnVn.channel +``` + +`connection-recording-summary.json` file example: + +``` +{ + "Id": "cr_W53Y93bB78", + "ChannelCount": 1, + "StartTime": "2023-07-13T20:21:49.164105381Z", + "EndTime": "2023-07-13T20:22:37.241911112Z", + "BytesUp": 125, + "BytesDown": 748, + "Errors": null +} +``` + +`*.data` files are binary files containing all data transmitted during a session. +`SHA256SUM` and `SHA256SUM.sig` files are used for cryptographically verifying the contents of this directory. + +### BSR Channel folder + +``` +└── chr_uVVuUITnVn.channel + ├── SHA256SUM + ├── SHA256SUM.sig + ├── channel-recording-summary.json + ├── channel-recording.meta + ├── messages-inbound.data + ├── messages-outbound.data + ├── requests-inbound.data + └── requests-outbound.data +``` + +`channel-recording.meta` file example: + +``` +id: chr_uVVuUITnVn +channelType: session +messages: outbound +requests: outbound +messages: inbound +requests: inbound +``` -BSR directories are validated based on the contents in the directory. -Each BSR directory contains a SHA256SUMS and SHA256SUMS.sign file that can be used to cryptographically verify the BSR directory's contents. -The SHA256SUMS file contains rows of file names paired with a checksum for the file contents. -The SHA256SUMS.sign is a copy of the SHA256SUMS file, signed with the BSR's private key. +`channel-recording-summary.json` file example: +``` +{ + "ChannelSummary": { + "Id": "chr_uVVuUITnVn", + "ConnectionRecordingId": "cr_W53Y93bB78", + "StartTime": "2023-07-13T20:21:49.230916214Z", + "EndTime": "2023-07-13T20:22:37.229379944Z", + "BytesUp": 125, + "BytesDown": 748, + "ChannelType": "session" + }, + "SessionProgram": "shell", + "SubsystemName": "", + "ExecProgram": "", + "FileTransferDirection": "not applicable" +} +``` +`*.data` files are binary files containing all data transmitted during a session. +`SHA256SUM` and `SHA256SUM.sig` files are used for cryptographically verifying the contents of this directory. For more information, refer to the [overview of configuring session recording](/boundary/docs/configuration/session-recording). diff --git a/website/content/docs/concepts/domain-model/auth-methods.mdx b/website/content/docs/concepts/domain-model/auth-methods.mdx index afdab1d5841..1cc2464f6e9 100644 --- a/website/content/docs/concepts/domain-model/auth-methods.mdx +++ b/website/content/docs/concepts/domain-model/auth-methods.mdx @@ -31,65 +31,119 @@ The password auth method has the following additional attributes: - `min_password_length` - (required) The default is 8. 
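Before moving on to the auth method attributes below, one note on the `SHA256SUM` files that appear throughout the BSR layout above: each recording, connection, and channel directory carries a `SHA256SUM` checksum list alongside a `SHA256SUM.sig` signature, and the checksum list can be spot-checked with nothing but the Go standard library. This is a minimal sketch, assuming the `SHA256SUM` file name shown in the tree above, the conventional `<checksum>  <filename>` row layout, and a hypothetical directory name; it deliberately ignores the `.sig` signature, which the linked session-recording operations docs cover.

```go
package main

import (
	"bufio"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"strings"
)

// verifyDir recomputes the SHA-256 digest of every file listed in the
// directory's SHA256SUM file and compares it to the recorded value.
func verifyDir(dir string) error {
	sums, err := os.Open(filepath.Join(dir, "SHA256SUM"))
	if err != nil {
		return err
	}
	defer sums.Close()

	scanner := bufio.NewScanner(sums)
	for scanner.Scan() {
		fields := strings.Fields(scanner.Text())
		if len(fields) != 2 {
			continue // skip blank or unexpected rows
		}
		want, name := fields[0], fields[1]

		f, err := os.Open(filepath.Join(dir, name))
		if err != nil {
			return err
		}
		h := sha256.New()
		_, err = io.Copy(h, f)
		f.Close()
		if err != nil {
			return err
		}
		if got := hex.EncodeToString(h.Sum(nil)); got != want {
			return fmt.Errorf("%s: checksum mismatch", name)
		}
	}
	return scanner.Err()
}

func main() {
	// Hypothetical BSR session directory name, matching the examples above.
	if err := verifyDir("sr_iNCdGSREeX.bsr"); err != nil {
		fmt.Println("verification failed:", err)
		return
	}
	fmt.Println("all checksums match")
}
```

A real verifier would also validate `SHA256SUM.sig` against the BSR public key before trusting the checksum rows at all.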
+### OIDC auth method attributes + +The OIDC auth method has the following additional attributes: + +- `account_claim_maps` (optional list) These are a map from custom + claims to the standard claims of sub, name, and email. These maps are + represented as key=value where the key equals the provider from-claim and the + value equals the Boundary to-claim. For example "oid=sub". You can specify this attribute + multiple times for different to-claims. + +- `allowed_audiences` (optional list) Audiences for which provider + responses are allowed. + +- `api_url_prefix` (required) The API prefix to use when generating callback URLs + for the provider. You should set the value to an address that allows the provider to reach + the controller. + +- `callback_url` (output read-only) The URL that should be provided to the IdP + for callbacks. + +- `claims_scopes` (optional list) The claims scope requested. You can specify this attribute + multiple times. + +- `client_id` (required) The OAuth 2.0 client identifier this auth method should + use with the provider. + +- `client-secret` (required) The corresponding client secret. + +- `client_secret_hmac` (output read-only) The HMAC of the client secret that the Boundary controller + returns. It is used for comparison to the value's initial setting. + +- `disable_discovered_config_validation` (optional) Disables validation logic + to ensure that the OIDC provider's information from its discovery endpoint + matches the information here. The validation is only performed at create or + update time. + +- `idp_ca_certs` - (optional) PEM-encoded X.509 CA certificate that can be used + as trust anchors when you connect to an OIDC provider. You can specify this attribute + multiple times. + +- `issuer` - (required) The provider's issuer URL. This value must match the issuer + field in generated tokens. + +- `max_age` (optional) The max age to send to the provider. This value indicates how + much time is allowed to have passed since the last authentication before the + user is challenged again. A value of `0` sets an immediate requirement for all + users to reauthenticate, and an unset `maxAge` results in a Terraform value of + -1 and the default TTL of the chosen OIDC is used. + +- `signing-algorithm` (required) The allowed signing algorithm. You can specify this attribute + multiple times for multiple values. + + ### LDAP auth method attributes Beta The ldap auth method has the following additional attributes: -- `state` - The state of the auth method; either inactive, active-private, or - active-public. +- `state` - The state of the auth method; either `inactive`, `active-private`, or + `active-public`. -- `start_tls` - (optional) If true, issues a StartTLS command after establishing - an unencrypted connection. Defaults to false. +- `start_tls` - (optional) If `true`, issues a StartTLS command after establishing + an unencrypted connection. Defaults to `false`. -- `insecure_tls` - (optional) If true, skips LDAP server SSL certificate +- `insecure_tls` - (optional) If `true`, skips LDAP server SSL certificate validation, which is insecure and should be used with caution. Defaults to - false. + `false`. -- `discover_dn` - (optional) If true, use anon bind to discover the bind DN - (Distinguished Name) of a user. Defaults to false. +- `discover_dn` - (optional) If `true`, use anon bind to discover the bind DN + (Distinguished Name) of a user. Defaults to `false`. -- `anon_group_search` - (optional) If true, use anon bind when performing LDAP - group searches. 
Defaults to false. +- `anon_group_search` - (optional) If `true`, use anon bind when performing LDAP + group searches. Defaults to `false`. -- `upn_domain` - (optional) If set, the userPrincipalDomain is used to construct +- `upn_domain` - (optional) If set, the `userPrincipalDomain` is used to construct the UPN string for the authenticating user. The constructed UPN appears as - [username]@UPNDomain Example: example.com, which causes Boundary to - bind as username@example.com when it authenticates the user. + `[username]@UPNDomain`. Example: `example.com`, which causes Boundary to + bind as `username@example.com` when it authenticates the user. - `urls` - (required) The LDAP URLS that specify LDAP servers to connect to. There must be at least one URL for each LDAP auth method. When attempting to connect, the URLs are tried in the order specified. - `user_dn` - (optional) If set, the base DN under which to perform user - search. Example: ou=Users,dc=example,dc=com + search. Example: `ou=Users,dc=example,dc=com`. - `user_attr` - (optional) If set, defines the attribute on a user's entry matching the login-name passed when the user authenticates. Examples: cn, uid - `user_filter` - (optional) If set, the Go template used to construct an LDAP user search filter. The template can access the following context variables: - [UserAttr, Username]. The default user_filter is ({{.UserAttr}}={{.Username}}) - or (userPrincipalName={{.Username}}@UPNDomain) if the upn-domain parameter is + [UserAttr, Username]. The default `user_filter` is + `({{.UserAttr}}={{.Username}})` or + `(userPrincipalName={{.Username}}@UPNDomain)` if the `upn-domain` parameter is set. -- `enable_groups` - (optional) If true, an authenticated user's groups are - found during authentication. Defaults to false. +- `enable_groups` - (optional) If `true`, an authenticated user's groups are + found during authentication. Defaults to `false`. - `group_dn` - (optional) If set, the base DN under which to perform a group - search. Example: ou=Groups,dc=example,dc=com + search. Example: `ou=Groups,dc=example,dc=com`. Note: There is no default, so no base DN is used for group searches, if it's not specified. - `group_attr` - (optional) If set, the LDAP attribute to follow on objects - returned by group_filter in order to enumerate user group membership. - Examples: for group_filter queries returning group objects, use: cn. For - queries returning user objects, use: memberOf. The default is cn. + returned by `group_filter` in order to enumerate user group membership. + Examples: for `group_filter` queries returning group objects, use: `cn`. For + queries returning user objects, use: `memberOf`. The default is `cn`. - `group_filter` - (optional) If set, the Go template used when constructing the group membership query. The template can access the following context - variables: [UserDN, Username]. The default is - (|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}})), + variables: `UserDN`, `Username`. The default is + `(|(memberUid={{.Username}})(member={{.UserDN}})(uniqueMember={{.UserDN}}))`, which is compatible with several common directory schemas. - `certificates` - (optional) If set, PEM encoded x509 certificates in ASN.1 @@ -106,20 +160,20 @@ The ldap auth method has the following additional attributes: - `bind_dn` - (optional) If set, the distinguished name of entry to bind when performing user and group searches. Example: - cn=vault,ou=Users,dc=example,dc=com + `cn=vault,ou=Users,dc=example,dc=com`. 
-- `bind_password` - (optional) If set, the password to use along with bind_dn +- `bind_password` - (optional) If set, the password to use along with `bind_dn` when performing user search. It must be set, if you specify the optional - bind_dn. + `bind_dn`. -- `use_token_groups` - (optional) If true, use the Active Directory tokenGroups +- `use_token_groups` - (optional) If `true`, use the Active Directory `tokenGroups` constructed attribute of the user to find the group memberships. This finds all security groups, including nested ones. - `account_attribute_maps` - (optional) If set, the attribute maps from custom attributes to the standard fullname and email account attributes. These - maps are represented as key=value where the key equals the from_attribute, and - the value equals the to_attribute. For example, "preferredName=fullName". All + maps are represented as `key=value` where the key equals the `from_attribute`, and + the value equals the `to_attribute`. For example, `preferredName=fullName`. All attribute names are case insensitive. diff --git a/website/content/docs/concepts/host-discovery/aws.mdx b/website/content/docs/concepts/host-discovery/aws.mdx new file mode 100644 index 00000000000..36d88d65d09 --- /dev/null +++ b/website/content/docs/concepts/host-discovery/aws.mdx @@ -0,0 +1,116 @@ +--- +layout: docs +page_title: AWS dynamic host catalogs +description: |- + An overview of AWS host discovery in Boundary +--- +# AWS dynamic host catalogs +Boundary uses dynamic host catalogs to automatically discover AWS EC2 instances and add them as hosts. + +## Create a host catalog to connect with AWS +Boundary uses plugins to integrate with a variety of providers. To use +a dynamic host catalog to integrate with AWS, you create a host catalog of the `plugin` type +and set the `plugin-name` value to `aws`. You must also provide the specific +fields needed for Boundary to authenticate with AWS. + + + + +```shell-session +$ boundary host-catalogs create plugin \ + -scope-id $BOUNDARY_PROJECT_ID \ + -plugin-name aws \ + -attr disable_credential_rotation=true \ + -attr region=us-east-1 \ + -secret access_key_id=env://AWS_ACCESS_KEY_ID \ + -secret secret_access_key=env://AWS_SECRET_ACCESS_KEY +``` + + + + + ```hcl + resource "boundary_host_catalog_plugin" "aws_host_catalog" { + name = "AWS Catalog" + description = "AWS Host Catalog" + scope_id = boundary_scope.project.id + plugin_name = "aws" + + attributes_json = jsonencode({ + "region" = "eu-west-2", + "disable_credential_rotation" = true }) + secrets_json = jsonencode({ + "access_key_id" = var.aws_access, + "secret_access_key" = var.aws_secret}) + } + ``` + + + + +The `scope-id` and `plugin-name` fields are required when you create a + dynamic host catalog. + +The fields following the `attr` and `secret` flags are specific to AWS and are required by + Boundary for authentication. + +- `disable_credential_rotation`: When set to `true`, Boundary will not rotate the credentials with AWS automatically. +- `region`: The region to configure the host catalog for. All host sets in this + catalog will be configured for this region. +- `access_key_id`: The access key ID for the IAM user to use with this host + catalog. +- `secret_access_key`: The secret access key for the IAM user to use with this + host catalog. + + +Refer to [the domain model documentation](/boundary/docs/concepts/domain-model/host-catalogs) for additional fields that you can use when you create host catalogs. 
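The CLI example above can also be driven from a Go e2e test using the `e2e.RunCommand` and `e2e.WithArgs` helpers that appear in the test changes earlier in this diff. This is a sketch, not repository code: the package placement, the helper function name, and the minimal `item.id` result shape are assumptions, while the flags themselves mirror the CLI example. A companion host set sketch appears after the next section.

```go
// Sketch: create an AWS plugin host catalog from an e2e test.
package aws_test

import (
	"context"
	"encoding/json"
	"testing"

	"github.com/hashicorp/boundary/testing/internal/e2e"
	"github.com/stretchr/testify/require"
)

func createAwsHostCatalogCli(t testing.TB, ctx context.Context, projectId string) string {
	output := e2e.RunCommand(ctx, "boundary",
		e2e.WithArgs(
			"host-catalogs", "create", "plugin",
			"-scope-id", projectId,
			"-plugin-name", "aws",
			"-attr", "disable_credential_rotation=true",
			"-attr", "region=us-east-1",
			"-secret", "access_key_id=env://AWS_ACCESS_KEY_ID",
			"-secret", "secret_access_key=env://AWS_SECRET_ACCESS_KEY",
			"-format", "json",
		),
	)
	require.NoError(t, output.Err, string(output.Stderr))

	// Assumed minimal shape of the -format json output; only the new ID is read.
	var result struct {
		Item struct {
			Id string `json:"id"`
		} `json:"item"`
	}
	require.NoError(t, json.Unmarshal(output.Stdout, &result))
	t.Logf("Created Host Catalog: %s", result.Item.Id)
	return result.Item.Id
}
```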
+ +## Create a host set to connect with AWS +[Host sets](/boundary/docs/concepts/domain-model/host-sets) specify which AWS + filters should be used to identify the discovered hosts that should be added as members. + +Create a host set using the following command: + + + + +```shell-session +$ boundary host-sets create plugin \ + -host-catalog-id $BOUNDARY_HOST_CATALOG_ID \ + -attr filters=tag-key=foo,bar \ + -attr filters=tag-key=baz +``` + + + + +```hcl +resource "boundary_host_set_plugin" "aws_host_set" { + name = "AWS Host Set" + description = "AWS Host Set" + host_catalog_id = boundary_scope.aws_host_catalog.id + attributes_json = jsonencode({ + "filters" = ["tag-key=foo,bar", "tag-key=baz"] }) +} +``` + + + + +The `host-catalog-id` value is a required field that specifies in which host catalog to + create this host set. + +Like with the host catalog, the fields passed in after the `attr` flag are + specific to AWS. + +The `filters` field contains string filters in the format key=val1,val2. The key corresponds to + a filter option, and the value(s) are a comma-separated list. For a list of + filter options, refer to the + [describe-instances in the AWS CLI reference](https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instances.html). + When the values in a single `filters` field are separated by a comma, either + can be true for the host to match. When multiple filters fields are provided, + they must all match for a host to match. In the example above, an instance must + have either tags `foo` or `bar`, and must have the tag `baz`. + +For more fields that you can use when creating host sets, refer to + [the domain model documentation](/boundary/docs/concepts/domain-model/host-sets). diff --git a/website/content/docs/concepts/host-discovery/azure.mdx b/website/content/docs/concepts/host-discovery/azure.mdx new file mode 100644 index 00000000000..c401237528d --- /dev/null +++ b/website/content/docs/concepts/host-discovery/azure.mdx @@ -0,0 +1,114 @@ +--- +layout: docs +page_title: Azure dynamic host catalogs +description: |- + An overview of Azure host discovery in Boundary +--- +# Azure dynamic host catalogs +Boundary uses dynamic host catalogs to automatically discover Azure resources available through Azure Resource Manager (ARM) and add them as hosts. + +## Create a host catalog to connect with Azure +Boundary uses plugins to integrate with a variety of providers. To use a +dynamic host catalog to integrate with Azure, you create a host catalog of the +`plugin` type and set the `plugin-name` value to `azure`. You must also provide the +specific fields needed for Boundary to authenticate with Azure. 
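Continuing the sketch from the AWS catalog section (same assumed package and imports), the host set and its filter semantics can be exercised the same way; the comments spell out the either/or versus must-also-match behavior described above. The Azure host catalog and host set examples follow.

```go
// Sketch: create a plugin host set whose membership follows the AWS filters above.
func createAwsHostSetCli(t testing.TB, ctx context.Context, hostCatalogId string) string {
	output := e2e.RunCommand(ctx, "boundary",
		e2e.WithArgs(
			"host-sets", "create", "plugin",
			"-host-catalog-id", hostCatalogId,
			// Within a single filters value, either tag "foo" or tag "bar" may match...
			"-attr", "filters=tag-key=foo,bar",
			// ...but every filters value must match, so tag "baz" is also required.
			"-attr", "filters=tag-key=baz",
			"-format", "json",
		),
	)
	require.NoError(t, output.Err, string(output.Stderr))

	// Assumed minimal shape of the -format json output.
	var result struct {
		Item struct {
			Id string `json:"id"`
		} `json:"item"`
	}
	require.NoError(t, json.Unmarshal(output.Stdout, &result))
	t.Logf("Created Host Set: %s", result.Item.Id)
	return result.Item.Id
}
```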
+ + + + +```shell-session +$ boundary host-catalogs create plugin \ + -scope-id $PROJECT_ID \ + -plugin-name azure \ + -attr disable_credential_rotation=true \ + -attr tenant_id=env://ARM_TENANT_ID \ + -attr subscription_id=env://ARM_SUBSCRIPTION_ID \ + -attr client_id=env://ARM_CLIENT_ID \ + -secret secret_value=env://ARM_CLIENT_SECRET +``` + + + + +```hcl +resource "boundary_host_catalog_plugin" "azure_host_catalog" { + name = "Azure Catalog" + description = "Azure Host Catalog" + scope_id = boundary_scope.project.id + plugin_name = "azure" + + attributes_json = jsonencode({ + "tenant_id" = "ARM_TENANT_ID", + "subscription_id" = "ARM_SUBSCRIPTION_ID" + "client_id" = "ARM_CLIENT_ID" + "disable_credential_rotation" = true }) + secrets_json = jsonencode({ + "secret_value" = "ARM_CLIENT_SECRET"}) +} +``` + + + + +The `scope-id` and `plugin-name` fields are required when you create a +dynamic host catalog. + +The fields following the `attr` and `secret` flags are specific to Azure and are required by + Boundary for authentication. + +- `disable_credential_rotation`: When set to `true`, Boundary will not rotate the credentials automatically. +- `tenant_id`: The ARM Tenant(Directory) ID +- `subscription_id`: The ARM Subscription ID +- `client_id`: The ARM Client (Application) ID +- `secret_value`: The ARM Client Secret + +Refer to [the domain model documentation](/boundary/docs/concepts/domain-model/host-catalogs) for additional fields that you can use when you create host catalogs. + +## Create a host set to connect with Azure +[Host sets](/boundary/docs/concepts/domain-model/host-sets) specify which Azure +Resource Manager (ARM) filters should be used to identify the discovered hosts that should be added as members. + +Create a host set using the following command: + + + + + + +```shell-session +$ boundary host-sets create plugin \ + -name database \ + -host-catalog-id $HOST_CATALOG_ID \ + -attr filter="tagName eq 'service-type' and tagValue eq 'database'" +``` + + + + +```hcl + resource "boundary_host_set_plugin" "azure_host_set" { + name = "Azure Set" + description = "Azure Host Set" + host_catalog_id = boundary_scope.azure_host_catalog.id + attributes_json = jsonencode({ + "filter" = "tagName eq 'service-type' and tagValue eq 'database'" }) +} +``` + + + + +The `host-catalog-id` value is a required field that specifies in which host catalog to + create this host set. + +The fields following the `attr` flag are specific to Azure. + +The `filter` field represents the ARM filter used to select resources that should be a part of + this host set. There are some limitations with the filtering syntax. + Specifically, when you use tags, other types of filters (such as on resource + type) are not allowed. As a result, it is generally useful to filter + directly on tag names or values as in the following examples: + - `tagName eq 'application'` + - `tagName eq 'application' and tagValue eq 'app2'` + +Refer to [the domain model documentation](/boundary/docs/concepts/domain-model/host-catalogs) for additional fields that you can use when you create host catalogs. 
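After Boundary syncs the host catalog, you can confirm which resources matched the filter by listing the hosts in the catalog. This is a minimal sketch; the catalog ID is a placeholder for the ID returned when you created the host catalog:

```shell-session
$ boundary hosts list -host-catalog-id $HOST_CATALOG_ID
```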
diff --git a/website/content/docs/concepts/host-discovery.mdx b/website/content/docs/concepts/host-discovery/index.mdx similarity index 70% rename from website/content/docs/concepts/host-discovery.mdx rename to website/content/docs/concepts/host-discovery/index.mdx index 531d614a0e8..116707fa1d8 100644 --- a/website/content/docs/concepts/host-discovery.mdx +++ b/website/content/docs/concepts/host-discovery/index.mdx @@ -10,7 +10,11 @@ description: |- Traditionally, connecting to remote hosts and services requires knowledge of the endpoint’s connection info (e.g. the IP address and port of the service). This creates complexity when managing the onboarding of new resources at scale -or dealing with dynamic services whose connection info frequently changes. +or dealing with dynamic, ephemeral services whose connection info frequently changes. +Furthermore, manually managing and updating new or existing resources adds unnecessary +operational overhead. +Resources should be tagged appropriately so that users automatically have access to +the resources that their identity allows them to connect to. **Host discovery** focuses on automating the process of onboarding new or changed infrastructure resources – and their connection info – to Boundary @@ -33,7 +37,7 @@ infrastructure targets can be automated with This allows for dynamic configuration of a host and target without the need for prior knowledge of the target’s connection info. -**[Runtime host discovery via dynamic host catalogs](/boundary/tutorials/access-management/azure-host-catalogs)**: +**[Runtime host discovery via dynamic host catalogs](/boundary/tutorials/access-management/aws-host-catalogs)**: Boundary dynamic host catalogs automate the ingestion of resources from infrastructure providers into Boundary. Boundary hosts are automatically created, updated and added to host sets in order to reflect the connection @@ -44,14 +48,15 @@ configure new or changed resources. ## Dynamic host catalogs Dynamic host catalogs are an agentless workflow for Boundary to securely query infrastructure providers at runtime to discover and configure -new services. Boundary dynamic host catalogs are written in go-plugin and run -as separate processes. Boundary administrators can define rules for which +new services. Boundary administrators can define rules for which external resources should be ingested into the catalog by -[creating dynamic host](/boundary/docs/concepts/domain-model/host-sets) - sets with an attributes filter. Attributes specify the fields which the plugin - should use to lookup which hosts should be members of this host set. +[creating host sets](/boundary/docs/concepts/domain-model/host-sets) +with an attributes filter. These filters specify which discovered hosts +should be members of the host set. -Currently, Boundary supports dynamic host catalog implementations for AWS and +Boundary currently supports dynamic host catalogs for AWS and Azure and we will continue to grow this ecosystem to support additional providers. -You can get started with dynamic host catalogs [here](/boundary/tutorials/access-management/azure-host-catalogs). +You can get started with dynamic host catalogs for AWS +[here](/boundary/tutorials/access-management/aws-host-catalogs) +and for Azure [here](/boundary/tutorials/access-management/azure-host-catalogs).
diff --git a/website/content/docs/concepts/iam.mdx b/website/content/docs/concepts/iam.mdx index 015ccd34be4..383b139d16f 100644 --- a/website/content/docs/concepts/iam.mdx +++ b/website/content/docs/concepts/iam.mdx @@ -41,6 +41,10 @@ method's identity provider to provide up-to-date information. [permissions](/boundary/docs/concepts/security/permissions) which are granted to any principal (user or group) from any scope. Roles can be contained by any scope, and the permissions can be applied to the same scope or any child scope. + +Roles can only be applied to a single scope at a time in Boundary, meaning each scope requires its own role. + + ## Access management ### Configure users - username/password @@ -50,9 +54,9 @@ There are 3 steps to adding new users to Boundary using the password auth method 3. In the same scope, create a new **user**, and then attach the new account to the new user. The user is now able to authenticate to Boundary. ### Configure users - OIDC/LDAP -[OIDC](/boundary/tutorials/identity-management/oidc-auth) and LDAP accounts and users are auto-vivified (automatically generated) in Boundary in the same scope -as the auth method. The accounts and users are only created once the user authenticates to Boundary for the first time. The same applies to OIDC/LDAP -[managed groups](/boundary/tutorials/identity-management/oidc-idp-groups). +With the [OIDC](/boundary/tutorials/identity-management/oidc-auth) and LDAP auth methods, Boundary uses data from the identity provider to automatically generate accounts and users in the same scope +as the auth method. The accounts and users are only created once the user authenticates to Boundary for the first time. +The same applies to OIDC/LDAP [managed groups](/boundary/tutorials/identity-management/oidc-idp-groups). ### Grant permissions When setting up access controls for a user, it is important to first consider which scope(s) the user needs access to. **Each scope requires its own set of roles**, @@ -108,4 +112,4 @@ Projects. You can use the same roles across “Boundary Admin”, “Org_A Admin”, and “Project_1 Admin” since the scopes and grants are the same. The only difference is the users/groups that are added to each role. - \ No newline at end of file + diff --git a/website/content/docs/concepts/security/data-encryption.mdx b/website/content/docs/concepts/security/data-encryption.mdx index b7ead769f24..26a7b5c3fdf 100644 --- a/website/content/docs/concepts/security/data-encryption.mdx +++ b/website/content/docs/concepts/security/data-encryption.mdx @@ -20,7 +20,7 @@ Worker](/boundary/docs/configuration/worker/pki-worker) for storage of authentic keys. It is optional for PKI workers; if not specified the authentication keys will not be encrypted on disk. This is not used by KMS workers. -## The `root` KMS key and per-scope KEK/DEKs OSS only +## The `root` KMS key and per-scope KEK/DEKs Community Edition only Following best practices of using different encryption keys for different purposes, Boundary has a number of encryption keys generated within each scope. @@ -110,14 +110,14 @@ The `bsr` KMS key is required for [session recording](/boundary/docs/configurati If you do not add a `bsr` key to your controller configuration, you will receive an error when you attempt to enable session recording. The key is used for encrypting data and checking the integrity of recordings. 
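For reference, the `bsr` key is declared like the other keys in this section, as a `kms` block in the controller configuration. The following sketch assumes the AWS KMS provider and uses a placeholder key ID:

```hcl
# KMS block used to encrypt session recording (BSR) data and verify recording integrity
kms "awskms" {
  purpose    = "bsr"
  region     = "us-east-1"
  kms_key_id = "19ec80b0-dfdd-4d97-8164-c6examplekey5"
}
```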
-## The `previous-root` KMS key OSS only +## The `previous-root` KMS key Community Edition only The `previous-root` KMS key is used when migrating to a new `root` key. Adding the `previous-root` KMS key to your configuration informs the Controller to use it for decrypting the existing information in the database, allowing you to rotate and rewrap the KEKs to complete the migration to the new root key. -## The `worker-auth` KMS key OSS only +## The `worker-auth` KMS key Community Edition only The `worker-auth` KMS key is a key shared by the Controller and Worker in order to authenticate a Worker to the Controller. Specifics of this mechanism can be @@ -125,7 +125,7 @@ found on the [Connections/TLS page](/boundary/docs/concepts/security/connections a worker is used with [PKI authentication](/boundary/docs/configuration/worker/pki-worker) this is unnecessary. -## The `recovery` KMS key OSS only +## The `recovery` KMS key Community Edition only The `recovery` KMS key is used for rescue/recovery operations that can be used by a client to authenticate almost any operation within Boundary. Its mechanism @@ -161,7 +161,7 @@ with the options to skip creating default resources, Terraform can be used to create the specific resources needed instead, with the `recovery` KMS used to authenticate setting up the initial auth method(s). -## The `config` KMS key OSS only +## The `config` KMS key Community Edition only This key can be used to encrypt values within Boundary's configuration file. By sharing this block between Boundary and an operator, the operator can put diff --git a/website/content/docs/concepts/workers.mdx b/website/content/docs/concepts/workers.mdx new file mode 100644 index 00000000000..e9f58f1f248 --- /dev/null +++ b/website/content/docs/concepts/workers.mdx @@ -0,0 +1,87 @@ +--- +layout: docs +page_title: Workers +description: |- + Introduction to Boundary workers +--- + +# Workers +Boundary's architecture consists of three main components: +1. **Control plane** - made up of controllers +1. **Data plane** - made up of workers +1. **Client** - installed on the user's device + +**Controllers** are what users authenticate to using the client, they contain Boundary's resources and permissions. In addition, controllers also communicate with external components +such as the database, KMS, Vault, identity providers, and plugins. + +**Workers** are primarily used as network proxies for Boundary sessions, they allow you to access +private targets. Instead of exposing a private network to the public, or allowing users to have access to entire private networks, workers create a direct network +tunnel between users and targets. + +![Boundary architecture example showing workers and controllers](/img/access-model.png) + +## Capabilities +You can use workers in various ways depending on your needs, as follows: + +### Session proxying + +You can use workers to proxy sessions between clients and targets located in public or private networks. In addition, you can configure workers in +[multi-hop](#multi-hop-sessions-hcp-ent) sessions and form a chain of proxies to reach deeper into protected networks. + +### Worker authentication + +Workers can [authenticate](/boundary/docs/concepts/security/connections-tls#pki-based-worker-authentication) directly to the control plane or through an upstream worker to the control plane. Authenticating through an upstream worker is also referred to as "multi-hop worker authentication." 
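In the worker-led flow, a PKI worker prints an authorization request token at startup, and an administrator submits that token to the control plane to complete registration. The following is a sketch of that final step; the environment variable is a placeholder for the token the worker prints:

```shell-session
$ # WORKER_AUTH_REQUEST_TOKEN holds the auth registration request token printed by the worker at startup
$ boundary workers create worker-led \
   -worker-generated-auth-token=$WORKER_AUTH_REQUEST_TOKEN
```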
+ +### Controller proxy + +In situations where controllers need access to a private service but don't have network access to it, workers can act as a proxy for communication. This is currently +supported for controllers connecting to a [private Vault](/boundary/tutorials/credential-management/hcp-private-vault-cred-injection) +environment. + +### Protocol decryption + +Workers can perform SSH protocol decryption for [credential injection](/boundary/docs/concepts/credential-management#credential-injection-hcp-ent) and [session +recording](/boundary/docs/concepts/domain-model/session-recordings). For session recording, workers also write the recorded session contents directly to the [storage +bucket](/boundary/docs/concepts/domain-model/storage-buckets). + +## Tags +In multi-datacenter and multi-cloud operating models, patterns of dividing up controllers, workers, and targets into appropriate regions or networks is often +desired to reduce latency or comply with security standards. You can assign workers [tags](/boundary/tutorials/worker-management/target-aware-workers) that Boundary +can [filter](/boundary/docs/concepts/filtering/worker-tags) through, to find the appropriate worker to use for a session. For example, Boundary could filter to workers +with tag “A,” to connect to targets in “Network A.” + +![Boundary architecture example showing workers with tags](/img/worker-tags.png) + +## Multi-hop sessions HCP/ENT +Most organizations want to provide access to infrastructure without exposing private networks. Many organizations also have complex network topologies requiring +inbound traffic to route through multiple network enclaves in order to reach the target system. +[Multi-hop](/boundary/docs/configuration/worker#multi-hop-worker-capabilities-hcp-ent) sessions allow you to chain together two or more workers +across multiple networks to form reverse proxy connections between the user and the target, even in complex networks with strict outbound-only policies. + +In multi-hop scenarios, there are typically three types of workers: +1. **Ingress worker** - An ingress worker is a worker that is accessible by the client. The client initiates the connection to the ingress worker. +1. **Intermediary worker** - An optional intermediary worker sits between ingress and egress workers as part of a multi-hop chain. There can be multiple intermediary workers as part of a multi-hop chain. +1. **Egress worker** - An egress worker is a worker that can access the target. The egress worker initiates reverse proxy connections to intermediary or ingress workers. + + +“Ingress,” “intermediary,” and “egress” are general ways to describe how the respective worker interfaces with resources, and a worker can act as more than one of those +at a time. For example in the diagram below, the intermediary worker is also an egress worker since it can access a target. + + +![Multi-hop session example showing ingress, intermediary, and egress workers](/img/concepts-multihop.png) + +After the persistent connection chain is established between the workers, when you attempt to connect to a target host, you are automatically proxied from: +1. Boundary client to ingress worker +1. Ingress worker to intermediary worker, where applicable +1. Ingress worker to egress worker +1. Egress worker to desired target + +## Deployment +Workers are services that can run on a container or virtual machine. You should deploy them strategically within networks to provide access to targets. 
In +all editions of Boundary, workers are fully self-managed and can be deployed anywhere. In HCP Boundary, HCP-managed workers are automatically deployed with the cluster. + +To learn more about workers and deployment, see: +* [Worker configuration](/boundary/docs/configuration/worker) +* [Recommended architecture](/boundary/docs/install-boundary/recommended-architecture) +* [Worker system requirements](/boundary/docs/install-boundary/system-requirements) +* [Worker management tutorials](/boundary/tutorials/worker-management) diff --git a/website/content/docs/configuration/kms/awskms.mdx b/website/content/docs/configuration/kms/awskms.mdx index 19310ad64c9..96d75320da8 100644 --- a/website/content/docs/configuration/kms/awskms.mdx +++ b/website/content/docs/configuration/kms/awskms.mdx @@ -31,7 +31,9 @@ kms "awskms" { These parameters apply to the `kms` stanza in the Boundary configuration file: - `purpose` - Purpose of this KMS, acceptable values are: `worker-auth`, `worker-auth-storage`, - `root`, `previous-root`, `recovery`, or `config`. + `root`, `previous-root`, `recovery`, `bsr`, or `config`. + + To [enable session recording](/boundary/docs/configuration/session-recording/enable-session-recording), you must configure the `bsr` value for the `purpose`. - `region` `(string: "us-east-1")`: The AWS region where the encryption key lives. If not provided, may be populated from the `AWS_REGION` or diff --git a/website/content/docs/configuration/session-recording/create-storage-bucket.mdx b/website/content/docs/configuration/session-recording/create-storage-bucket.mdx index f3b55a0f326..3c4b4693e6e 100644 --- a/website/content/docs/configuration/session-recording/create-storage-bucket.mdx +++ b/website/content/docs/configuration/session-recording/create-storage-bucket.mdx @@ -8,9 +8,9 @@ description: |- # Create a storage bucket As of Boundary 0.13.0, you can record and audit user sessions. -A resource known as a [storage bucket](/boundary/docs/concepts/domain-model/storage-buckets) is used to store the recorded sessions. +A Boundary resource known as a [storage bucket](/boundary/docs/concepts/domain-model/storage-buckets) is used to store the recorded sessions. The storage bucket represents a bucket in an external store. -Before you can enable session recording, you must create one or more storage buckets. +Before you can enable session recording, you must create one or more storage buckets in Boundary and associate them with the external store. A storage bucket can only belong to the Global scope or an Org scope. A storage bucket that is associated with the Global scope can be associated with any target. @@ -19,14 +19,18 @@ Any storage buckets associated with an Org scope are deleted when the Org itself For more information about using session recording to audit user sessions, refer to [Auditing](/boundary/docs/concepts/auditing). -**Requirements**: -- An AWS S3 storage bucket +## Requirements + +Before you can create a storage bucket in Boundary, you must ensure that your environment meets certain requirements. +Session recording requires specific configuration for both the external store and the Boundary worker. +At this time, the only supported storage is AWS S3. - At this time, the only supported storage for storage buckets is AWS S3. - In AWS S3, a storage bucket contains the bucket name, region, and optional prefix, as well as any credentials needed to access the bucket, such as the access and secret key. 
-- A Boundary PKI worker with access to the AWS S3 storage bucket +### AWS requirements - This worker must be configured with a local recording storage path, defined as `recording_storage_path`. For more details, refer to [PKI workers](/boundary/docs/configuration/worker/pki-worker#session-recording-hcp-ent). +- An AWS S3 storage bucket + + You must associate the Boundary storage bucket with an AWS S3 storage bucket. + An AWS S3 storage bucket contains the bucket name, region, and optional prefix, as well as any credentials needed to access the bucket, such as the access and secret key. - An AWS IAM role policy with the following statement: ```json { @@ -52,28 +56,104 @@ For more information about using session recording to audit user sessions, refer } ] } + + ``` +- If you apply KMS encryption to the storage bucket, you must add these additional permissions to the role policy for the storage bucket's IAM user: + ```json + { + "Action": [ + "kms:Decrypt", + "kms:GenerateDataKey", + "kms:DescribeKey" + ], + "Effect": "Allow", + "Resource": "arn:aws:kms:us-east-1:1234567890:key/uuid" + } ``` + The following is an example working policy with KMS encryption configured on the S3 bucket: + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "S3Permissions", + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:GetObjectAttributes" + ], + "Resource": [ + "arn:aws:s3:::test-session-recording-bucket/*" + ] + }, + { + "Sid": "UserPermissions", + "Effect": "Allow", + "Action": [ + "iam:DeleteAccessKey", + "iam:GetUser", + "iam:CreateAccessKey" + ], + "Resource": [ + "arn:aws:iam::1234567890:user/test-boundary" + ] + }, + { + "Sid": "KMSPermissions", + "Effect": "Allow", + "Action": [ + "kms:Decrypt", + "kms:GenerateDataKey", + "kms:DescribeKey" + ], + "Resource": [ + "arn:aws:kms:us-east-2:1234567890:key/4b887395-c376-4936-8f37-80c592ea582c" + ] + } + ] + } + + ``` + +### Boundary workers requirements + +[Session recording](/boundary/docs/configuration/session-recording) requires at least one PKI worker that: +- Has access to the AWS S3 storage bucket +- Has an accessible directory defined by `recording_storage_path` for storing session recordings while they are in progress. On session closure, Boundary moves the local session recording to remote storage and deletes the local copy. For more details, refer to [PKI workers](/boundary/docs/configuration/worker/pki-worker#session-recording-hcp-ent). +- Has at least 1 MB of available disk space. +- Runs Darwin, Windows, or Linux. The following binaries are not supported for session recording: NetBSD, OpenBSD, Solaris. + +Development example: + +```hcl +worker { +auth_storage_path="/boundary/demo-worker-1" +initial_upstreams = ["10.0.0.1"] +recording_storage_path="/local/storage/directory" +} +``` -Complete the following steps to create a storage bucket for session recording: +Complete the following steps to create a storage bucket in Boundary for session recording: 1. Log in to Boundary. -1. Use the following command to create a storage bucket: +1. 
Use the following command to create a storage bucket in Boundary: ```bash boundary storage-buckets create -bucket-name mybucket1 -plugin-name aws -secrets ‘{“access_key_id”: “123456789” , “secret_access_key” : “123/456789/12345678”}’ -worker-filter ‘“dev” in “/tags/type”’ -attributes ‘{“region”:”us-east-1”,”disable_credential_rotation”:true}’ -scope-id o_1234567890 ``` - Replace the values above with the following required AWS secrets and any optional [attributes](/boundary/docs/concepts/domain-model/storage-buckets) you want to associate with the storage bucket: + Replace the values above with the following required AWS secrets and any optional [attributes](/boundary/docs/concepts/domain-model/storage-buckets) you want to associate with the Boundary storage bucket: - `region`: (Required) The AWS region to use. - - `bucket-name`: (Required) Name of the bucket in AWS. + - `bucket-name`: (Required) Name of the AWS bucket you want to associate with the Boundary storage bucket. - `access_key_id`: (Required) The AWS access key to use. - `secret_access_key_id`: (Required) The AWS secret access key to use. This attribute contains the secret access key for static credentials. - - `worker-filter`: (Required) A filter that indicates which Boundary workers have access to the storage. The filter must match an existing worker in order to create a storage bucket. + - `worker-filter`: (Required) A filter that indicates which Boundary workers have access to the storage. The filter must match an existing worker in order to create a Boundary storage bucket. - `shared_credentials_file`: (Optional) The shared credentials file to use. - `shared_credentials_profile`: (Optional) The profile name to use in the shared credentials file. @@ -84,17 +164,17 @@ Complete the following steps to create a storage bucket for session recording: 1. Log in to Boundary. 1. Select **Storage Buckets** in the navigation bar. 1. Select **New Storage Bucket**. -1. Complete the following fields: +1. Complete the following fields to create the Boundary storage bucket: - **Name**: (Optional) The name field is optional, but if you enter a name it must be unique. - - **Description**: (Optional) An optional description of the bucket for identification purposes. + - **Description**: (Optional) An optional description of the Boundary storage bucket for identification purposes. - **Scope**: (Required) A storage bucket can belong to the Global scope or an Org scope. It can only associated with targets from the scope it belongs to. - - **Bucket name**: (Required) Name of the bucket in AWS. + - **Bucket name**: (Required) Name of the AWS bucket you want to associate with the Boundary storage bucket. - **Bucket prefix**: (Optional) A base path where session recordings are stored. - **Region**: (Required) The AWS region to use. - **Access key ID**: (Required) The access key ID that AWS generates for the IAM user to use with the storage bucket. - **Secret access key**: (Required) The secret access key that AWS generates for the IAM user to use with this storage bucket. - - **Worker filter** (Required) A filter that indicates which Boundary workers have access to the storage. The filter must match an existing worker in order to create a storage bucket. + - **Worker filter** (Required) A filter that indicates which Boundary workers have access to the storage. The filter must match an existing worker in order to create a Boundary storage bucket. 
- **Disable credential rotation** (Optional) Although credentials are stored encrypted within Boundary, by default the [AWS plugin](https://github.com/hashicorp/boundary-plugin-aws) attempts to rotate the credentials you provide. The given credentials are used to create a new credential, and then the original credential is revoked. After rotation, only Boundary knows the client secret the plugin uses. Select this option to disable this behavior and prevent the automatic rotation of credentials. @@ -104,5 +184,5 @@ Complete the following steps to create a storage bucket for session recording: -Boundary creates the storage bucket and provides you with the bucket's ID. -Once the storage bucket is created, you can use the bucket's ID to [enable session recording on targets](/boundary/docs/configuration/session-recording/enable-session-recording). +Boundary creates the storage bucket resource and provides you with the bucket's ID. +Once the storage bucket is created in Boundary, you can use the bucket's ID to [enable session recording on targets](/boundary/docs/configuration/session-recording/enable-session-recording). diff --git a/website/content/docs/configuration/session-recording/enable-session-recording.mdx b/website/content/docs/configuration/session-recording/enable-session-recording.mdx index cab4f09cf5e..b52e466057d 100644 --- a/website/content/docs/configuration/session-recording/enable-session-recording.mdx +++ b/website/content/docs/configuration/session-recording/enable-session-recording.mdx @@ -1,6 +1,6 @@ --- layout: docs -page_title: Create a storage bucket +page_title: Enable session recording on a target description: |- How to enable session recording on a target in Boundary --- @@ -65,12 +65,9 @@ Complete the following steps to enable session recording on a target. - To create a new target, select **New Target**. - To edit an existing target, select the target, and then select **Edit Form**. 1. Configure the target with any relevant [attributes](/boundary/docs/concepts/domain-model/targets). -The following settings are required for session recording: +The following setting is required for session recording: - Select **SSH** for the **Type**. - - On the **Injected Application Credentials** tab, select the inject application credential sources you want to use for this target. - - ![Add injected application credentials](/img/inject-creds.png) 1. Select **Save**. 1. Select **Enable recording**. diff --git a/website/content/docs/configuration/worker/index.mdx b/website/content/docs/configuration/worker/index.mdx index 55b4d1c17e7..ec3470b486f 100644 --- a/website/content/docs/configuration/worker/index.mdx +++ b/website/content/docs/configuration/worker/index.mdx @@ -39,6 +39,9 @@ Regardless of registration mechanism, the following fields are supported. worker { public_addr = "5.1.23.198" + # Local storage path required if session recording is enabled + recording_storage_path = "tmp/boundary/" + # Mutually exclusive with hcp_boundary_cluster_id initial_upstreams = [ "10.0.0.1", @@ -74,6 +77,18 @@ worker { using env or file, their contents must formatted as a JSON array: `["127.0.0.1", "192.168.0.1", "10.0.0.1"]` + HCP Boundary workers require the [`hcp_boundary_cluster_id`](/boundary/docs/configuration/worker/#hcp_boundary_cluster_id) parameter instead of `initial upstreams`. + If you configure an HCP worker with `initial_upstreams`, the worker configuration fails. 
+ +- `hcp_boundary_cluster_id` - A string that you must use to configure PKI workers + to connect to your HCP Boundary cluster rather than specifying + `initial_upstreams`. This parameter is currently only valid for workers using the PKI + registration method and for workers directly connected to HCP Boundary. + +- `recording_storage_path` - A path to the local storage for recorded sessions. + Session recordings are stored in the local storage while they are in progress. + When the session is complete, Boundary moves the local session recording to remote storage and deletes the local copy. + - `tags` - A map of key-value pairs where values are an array of strings. Most commonly used for [filtering](/boundary/docs/concepts/filtering) targets a worker can proxy via [worker @@ -81,11 +96,6 @@ worker { set here will be re-parsed and new values used. It can also be a string referring to a file on disk (`file://`) or an env var (`env://`). -- `hcp_boundary_cluster_id` - A string that can be used to configure PKI workers - to connect to your HCP Boundary cluster rather than specifying - `initial_upstreams`. This is currently only valid for workers using the PKI - registration method and for workers directly connected to HCP Boundary. - ## Signals The `SIGHUP` signal causes a worker to reload its configuration file to pick up any updates for the `initial_upstreams` and `tags` values. @@ -157,6 +167,9 @@ worker { # Path for worker storage, assuming PKI registration. Must be unique across workers auth_storage_path="/boundary/demo-worker-1" + # Local storage path required if session recording is enabled + recording_storage_path = "tmp/boundary/" + # Workers typically need to reach upstreams on :9201 initial_upstreams = [ "10.0.0.1", @@ -194,3 +207,4 @@ Refer to the [Manage Multi-Hop Sessions with HCP Boundary](/boundary/tutorials/h [pki workers]: /boundary/docs/configuration/worker/pki-worker [target]: /boundary/docs/concepts/domain-model/targets [target worker filters]: /boundary/docs/concepts/filtering/worker-tags#target-worker-filtering +[session recording]: /boundary/docs/configuration/session-recording diff --git a/website/content/docs/enterprise/automated-license-reporting.mdx b/website/content/docs/enterprise/automated-license-reporting.mdx index ebef96bdb23..0304fbd7ba6 100644 --- a/website/content/docs/enterprise/automated-license-reporting.mdx +++ b/website/content/docs/enterprise/automated-license-reporting.mdx @@ -9,7 +9,7 @@ description: >- Automated license utilization reporting sends license utilization data to HashiCorp without requiring you to manually collect and report them. It also lets you review your license usage with the monitoring solution you already use (for example Splunk, Datadog, or others) so you can optimize and manage your deployments. Use these reports to understand how much more you can deploy under your current contract, protect against overutilization, and budget for predicted consumption. -Automated reporting shares the minimum data required to validate license utilization as defined in our contracts. They consist of mostly computed metrics and will never contain Personal Identifiable Information (PII) or other sensitive information. Automated reporting shares the data with HashiCorp using a secure, unidirectional HTTPS API and makes an auditable record in the product logs each time it submits a report. +Automated reporting shares the minimum data required to validate license utilization as defined in our contracts. 
They consist of mostly computed metrics and will never contain Personal Identifiable Information (PII) or other sensitive information. Automated reporting shares the data with HashiCorp using a secure, unidirectional HTTPS API and makes an auditable record in the product logs each time it submits a report. The reporting process is GDPR compliant and submits reports roughly once every 24 hours. ## Enable automated reporting @@ -30,7 +30,7 @@ Upgrade to a release that supports license utilization reporting. These releases ### 3. Check logs -Automatic license utilization reporting will start sending data within 24 hours. Check the product logs for records that the data sent successfully. +Automatic license utilization reporting will start sending data within roughly 24 hours. Check the product logs for records that the data sent successfully. The records are sent as system events. You can use [`file sync`](/boundary/docs/configuration/events/file) to configure a file where Boundary logs events. @@ -77,7 +77,7 @@ If your installation is air-gapped or your network doesn’t allow the correct e } ``` -In this case, reconfigure your network to allow egress and check back in 24 hours. +In this case, reconfigure your network to allow egress and check back in roughly 24 hours. ## Opt out @@ -111,7 +111,7 @@ $ export OPTOUT_LICENSE_REPORTING=true Now restart your system. -Check your product logs 24 hours after opting out to make sure that the system isn’t trying to send reports. +Check your product logs roughly 24 hours after opting out to make sure that the system isn’t trying to send reports. If your configuration file and environment variable differ, the environment variable setting will take precedence. diff --git a/website/content/docs/enterprise/index.mdx b/website/content/docs/enterprise/index.mdx index 417e19cf80a..3116a777793 100644 --- a/website/content/docs/enterprise/index.mdx +++ b/website/content/docs/enterprise/index.mdx @@ -7,7 +7,7 @@ description: |- # Boundary Enterprise -Boundary Enterprise has features that are not available in the open source version of the product, as well as support. +Boundary Enterprise has features that are not available in the Community Edition of the product, as well as support. The Enterprise version is self-managed in your environment, making it the perfect choice for agencies that are not able to use SaaS products for compliance or regulatory reasons. The [Install Boundary](/boundary/docs/install-boundary) section can help you build a self-managed production environment using Boundary Enterprise. @@ -16,9 +16,9 @@ You must obtain a [license](/boundary/docs/enterprise/licensing) to use Boundary You can enable or opt out of automated license reporting. Refer to [Automated license utilization reporting](/boundary/docs/enterprise/automated-license-reporting) for more information. -## Upgrade from Boundary OSS +## Upgrade from Boundary Community Edition -To upgrade from the open source version of Boundary to Boundary Enterprise, complete the following steps: +To upgrade from the Community Edition of Boundary to Boundary Enterprise, complete the following steps: 1. [Contact sales](https://hashicorp.com/contact-sales) to procure a Boundary Enterprise license. 1. [Upgrade Boundary](/boundary/tutorials/self-managed-deployment/upgrade-version) using a Boundary Enterprise binary. 
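After you obtain the license, the controller reads it from the `license` parameter in the `controller` block of its configuration. The following is a minimal sketch; the controller name and file path are placeholders, and the value can also be supplied inline or as an `env://` reference:

```hcl
controller {
  name = "boundary-controller-1"

  # Enterprise license file; can also be the raw value or an env:// value
  license = "file:///etc/boundary.d/boundary.hclic"
}
```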
diff --git a/website/content/docs/getting-started/dev-mode/connect-to-dev-target.mdx b/website/content/docs/getting-started/dev-mode/connect-to-dev-target.mdx index 049cada2633..583739e06a0 100644 --- a/website/content/docs/getting-started/dev-mode/connect-to-dev-target.mdx +++ b/website/content/docs/getting-started/dev-mode/connect-to-dev-target.mdx @@ -54,7 +54,9 @@ within the authorized session. When you are finished making connections, simply Boundary includes connect helpers that automatically accept host SSH key prompts for you. These are written as `boundary connect `. -In the following example, the helper will automatically execute `ssh` for you, +### SSH connect helper + +In the following example, the helper automatically executes `ssh` for you, filling in the local address and port. An expected host ID is set to avoid warnings on future connections when a different port is allocated automatically. A host key must still be accepted upon first connect. @@ -63,8 +65,21 @@ A host key must still be accepted upon first connect. $ boundary connect ssh -target-id ttcp_1234567890 ``` + + If you want to pass additional flags to the SSH client, add them to the command -line separated by a double-dash; anything after the double dash will be passed +line separated by a double dash; anything after the double dash is passed to the executed client. For instance: ```shell-session @@ -82,6 +97,150 @@ specify a username other than your currently logged-in user. This ensures that regardless of your `-style` choice, the username is properly passed to the executed client, and you don't need to figure out the syntax yourself. +### RDP connect helper + +The RDP helper attempts to decide which RDP client to use +depending on your operating system. On Windows, it uses `mstsc.exe`. On +Mac, it uses `open`. Other operating systems are not currently supported. + +```shell-session +$ boundary connect rdp -target-id ttcp_eTcZMueUYv +``` + +This command executes the RDP client and establishes the connection through +Boundary. + + + +The RDP helper also supports a `-style` flag to allow you to override the +default RDP client. Recognized styles are `mstsc` and `open`. + +### PostgreSQL connect helper + +The PostgreSQL helper executes `psql`, and passes information such as the username and +database name from the command line. + +```shell-session +$ boundary connect postgres -target-id ttcp_eTcZMueUYv -username admin -dbname postgres +``` + + + +The PostgreSQL helper automatically injects brokered credentials, allowing you +to access the PostgreSQL instance without knowing the username and password. + +```shell-session +$ boundary connect postgres -target-id ttcp_eTcZMueUYv -dbname postgres +``` + + + +The helper also supports a `-style` flag to allow you to override the +default PostgreSQL client. The only recognized style is `psql`. + +### HTTP connect helper + +The HTTP connect helper executes `curl`, passing information such as the HTTP path, +method, verb, and others. It can be useful to call API endpoints through the Boundary +proxy. In the example below, we call a weather API endpoint to get the rain forecast for +New York for the next few days. + +```shell-session +$ boundary connect http -target-id ttcp_VlpkajEuuf -path '/v1/forecast?latitude=40.7143&longitude=-74.006&daily=precipitation_sum&timezone=America%2FNew_York' +``` + + + +The HTTP helper supports several flags: + +- `host`: Specifies the host value to use, overriding the endpoint address from the session information. 
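For example, the following invocation passes a username to the underlying `ssh` client; the target ID matches the example above and the username is a placeholder:

```shell-session
$ boundary connect ssh -target-id ttcp_1234567890 -username ubuntu
```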
+ +- `method`: Specifies the method to use. If you do not set this value, the helper uses the client's default method. + +- `path`: Specifies a path that is appended to the generated URL. + +- `scheme`: Specifies the scheme to use. The default is https. + +The helper also supports a `-style` flag to allow you to override the +default HTTP client. The only recognized style is `curl`. + +### Kubernetes connect helper + +The Kubernetes connect helper executes `kubectl` by proxying the call through Boundary. +It can be useful to access a Kubernetes cluster that does not publicly expose its API server. +To pass subcommands to `kubectl`, use `--` followed by the command you want to pass. In +this example, we're calling `kubectl top node`: + +```shell-session +$ boundary connect kube -target-id ttcp_Yq0QCUMSe2 -- top node +``` + + + +The Kubernetes helper supports several flags: + +- `host`: Specifies the host value to use, overriding the endpoint address from the session information. + +- `scheme`: Specifies the scheme to use. The default is https. + +The helper also supports a `-style` flag to allow you to override the +default Kubernetes client. The only recognized style is `kubectl`. + ## Select targets When using `boundary connect` you must identify the target used for connecting. @@ -105,18 +264,10 @@ $ boundary connect ssh -target-name "Generated target" -target-scope-name "Gener ## Built-in vs. exec -Boundary comes with built-in wrappers for popular layer 7 connection protocols, -such as: - -- `ssh`: defaults to the local SSH client (`ssh`) -- `postgres`: defaults to the official Postgres CLI client (`psql`) -- `rdp`: defaults to the built-in Windows RDP client (`mstsc`) -- `http`: defaults to `curl` -- `kube`: defaults to `kubectl` - -However, `boundary connect` can accommodate executing clients even when there is -no built-in support for a specific client using `-exec`. The `-exec` flag is a -very powerful tool, allowing you to wrap Boundary TCP sessions in your preferred +We've seen the built-in connect helpers above, however, `boundary connect` +can accommodate executing clients even when there is no built-in support +for a specific client using `-exec`. The `-exec` flag is a very powerful +tool, allowing you to wrap Boundary TCP sessions in your preferred client. You can use this flag to create an authenticated proxy to almost anything. @@ -217,4 +368,4 @@ proxy details. See our [basic administration workflows](/boundary/tutorials/oss-administration) -for in depth discussion on managing scopes, targets, identities, and sessions. \ No newline at end of file +for in depth discussion on managing scopes, targets, identities, and sessions. diff --git a/website/content/docs/getting-started/dev-mode/dev-mode.mdx b/website/content/docs/getting-started/dev-mode/dev-mode.mdx index 80973633777..ba20075db65 100644 --- a/website/content/docs/getting-started/dev-mode/dev-mode.mdx +++ b/website/content/docs/getting-started/dev-mode/dev-mode.mdx @@ -1,7 +1,7 @@ --- layout: docs page_title: What is dev mode? -description: Getting started with Boundary OSS in dev mode +description: Getting started with Boundary Community Edition in dev mode --- # What is dev mode? 
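Dev mode runs an all-in-one Boundary environment, including a controller, a worker, and a PostgreSQL database launched in Docker, from a single command:

```shell-session
$ boundary dev
```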
diff --git a/website/content/docs/getting-started/index.mdx b/website/content/docs/getting-started/index.mdx index 9ca91aee358..1497cb28e81 100644 --- a/website/content/docs/getting-started/index.mdx +++ b/website/content/docs/getting-started/index.mdx @@ -15,7 +15,7 @@ There are three options to get started with Boundary: easiest way to get started. It streamlines deployment and operations, and is available as a freemium service. HCP Boundary allows users to run their own worker proxies, preventing exposure of your network to HashiCorp or clients. - HCP Boundary has features that are not available in the open source version of the product, as well as support. + HCP Boundary has features that are not available in the Community Edition of the product, as well as support. It has full feature-parity with Boundary Enterprise. Review the [Get started with HCP Boundary](/boundary/docs/getting-started/deploy-and-login) section to @@ -24,9 +24,9 @@ There are three options to get started with Boundary: It has full feature-parity with HCP Boundary. The Enterprise version is self-managed in your environment, making it the perfect choice for agencies that are not able to use SaaS products for compliance or regulatory reasons. The [Install Boundary](/boundary/docs/install-boundary) section can help you build a self-managed production environment using Boundary Enterprise. -- **Boundary OSS** is an open source distribution of Boundary that users can run +- **Boundary Community Edition** is a free distribution of Boundary that users can run in their own environments. The [Dev Mode quick start](/boundary/docs/oss/installing/dev-mode) section discusses running Boundary - in Dev mode on your local machine, allowing you to get started with the OSS distribution quickly. + in Dev mode on your local machine, allowing you to get started with the Community Edition distribution quickly. We recommend you use the Boundary model that best meets your intended use case. If you’re not sure what’s best for you, we recommend reviewing the [Getting diff --git a/website/content/docs/hcp/get-started/connect-to-target.mdx b/website/content/docs/hcp/get-started/connect-to-target.mdx index e0bcf354765..dff185fd5b8 100644 --- a/website/content/docs/hcp/get-started/connect-to-target.mdx +++ b/website/content/docs/hcp/get-started/connect-to-target.mdx @@ -57,7 +57,9 @@ within the authorized session. When you are finished making connections, simply Boundary includes connect helpers that automatically accept host SSH key prompts for you. These are written as `boundary connect `. -In the following example, the helper will automatically execute `ssh` for you, +#### SSH connect helper + +In the following example, the helper automatically executes `ssh` for you, filling in the local address and port. An expected host ID is set to avoid warnings on future connections when a different port is allocated automatically. A host key must still be accepted upon first connect. @@ -66,8 +68,21 @@ A host key must still be accepted upon first connect. $ boundary connect ssh -target-id ttcp_eTcZMueUYv ``` + + If you want to pass additional flags to the SSH client, add them to the command -line separated by a double-dash; anything after the double dash will be passed +line separated by a double dash; anything after the double dash is passed to the executed client. For instance: ```shell-session @@ -85,6 +100,150 @@ specify a username other than your currently logged-in user. 
This ensures that regardless of your `-style` choice, the username is properly passed to the executed client, and you don't need to figure out the syntax yourself. +#### RDP connect helper + +The RDP helper attempts to decide which RDP client to use +depending on your operating system. On Windows, it uses `mstsc.exe`. On +Mac, it uses `open`. Other operating systems are not currently supported. + +```shell-session +$ boundary connect rdp -target-id ttcp_eTcZMueUYv +``` + +This command executes the RDP client and establishes the connection through +Boundary. + + + +The RDP helper also supports a `-style` flag to allow you to override the +default RDP client. Recognized styles are `mstsc` and `open`. + +#### PostgreSQL connect helper + +The PostgreSQL helper executes `psql`, and passes information such as the username and +database name from the command line. + +```shell-session +$ boundary connect postgres -target-id ttcp_eTcZMueUYv -username admin -dbname postgres +``` + + + +The PostgreSQL helper automatically injects brokered credentials, allowing you +to access the PostgreSQL instance without knowing the username and password. + +```shell-session +$ boundary connect postgres -target-id ttcp_eTcZMueUYv -dbname postgres +``` + + + +The helper also supports a `-style` flag to allow you to override the +default PostgreSQL client. The only recognized style is `psql`. + +#### HTTP connect helper + +The HTTP connect helper executes `curl`, passing information such as the HTTP path, +method, verb, and others. It can be useful to call API endpoints through the Boundary +proxy. In the example below, we call a weather API endpoint to get the rain forecast for +New York for the next few days. + +```shell-session +$ boundary connect http -target-id ttcp_VlpkajEuuf -path '/v1/forecast?latitude=40.7143&longitude=-74.006&daily=precipitation_sum&timezone=America%2FNew_York' +``` + + + +The HTTP helper supports several flags: + +- `host`: Specifies the host value to use, overriding the endpoint address from the session information. + +- `method`: Specifies the method to use. If you do not set this value, the helper uses the client's default method. + +- `path`: Specifies a path that is appended to the generated URL. + +- `scheme`: Specifies the scheme to use. The default is https. + +The helper also supports a `-style` flag to allow you to override the +default HTTP client. The only recognized style is `curl`. + +#### Kubernetes connect helper + +The Kubernetes connect helper executes `kubectl` by proxying the call through Boundary. +It can be useful to access a Kubernetes cluster that does not publicly expose its API server. +To pass subcommands to `kubectl`, use `--` followed by the command you want to pass. In +this example, we're calling `kubectl top node`: + +```shell-session +$ boundary connect kube -target-id ttcp_Yq0QCUMSe2 -- top node +``` + + + +The Kubernetes helper supports several flags: + +- `host`: Specifies the host value to use, overriding the endpoint address from the session information. + +- `scheme`: Specifies the scheme to use. The default is https. + +The helper also supports a `-style` flag to allow you to override the +default Kubernetes client. The only recognized style is `kubectl`. + ## Select targets When using `boundary connect` you must identify the target used for connecting. @@ -108,18 +267,10 @@ $ boundary connect ssh -target-name "Generated target" -target-scope-name "Gener ## Built-in vs. 
exec -Boundary comes with built-in wrappers for popular layer 7 connection protocols, -such as: - -- `ssh`: defaults to the local SSH client (`ssh`) -- `postgres`: defaults to the official Postgres CLI client (`psql`) -- `rdp`: defaults to the built-in Windows RDP client (`mstsc`) -- `http`: defaults to `curl` -- `kube`: defaults to `kubectl` - -However, `boundary connect` can accommodate executing clients even when there is -no built-in support for a specific client using `-exec`. The `-exec` flag is a -very powerful tool, allowing you to wrap Boundary TCP sessions in your preferred +We've seen the built-in connect helpers above, however, `boundary connect` +can accommodate executing clients even when there is no built-in support +for a specific client using `-exec`. The `-exec` flag is a very powerful +tool, allowing you to wrap Boundary TCP sessions in your preferred client. You can use this flag to create an authenticated proxy to almost anything. diff --git a/website/content/docs/hcp/get-started/deploy-and-login.mdx b/website/content/docs/hcp/get-started/deploy-and-login.mdx index ae3f0b148ec..9d731b266cc 100644 --- a/website/content/docs/hcp/get-started/deploy-and-login.mdx +++ b/website/content/docs/hcp/get-started/deploy-and-login.mdx @@ -29,7 +29,7 @@ To get started with HCP Boundary, users need: If you prefer to get started on your local machine, refer to [Run and Login in Dev Mode](/boundary/docs/oss/installing/run-and-login). For more information on Boundary -OSS and self-managed installations, refer to [Boundary OSS](/boundary/docs/oss). +Community Edition and self-managed installations, refer to [Boundary Community Edition](/boundary/docs/oss). ## Deploy an HCP Boundary cluster diff --git a/website/content/docs/install-boundary/configure-controllers.mdx b/website/content/docs/install-boundary/configure-controllers.mdx index e3d7ceee9c2..0bfed2d397c 100644 --- a/website/content/docs/install-boundary/configure-controllers.mdx +++ b/website/content/docs/install-boundary/configure-controllers.mdx @@ -110,7 +110,9 @@ AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY + In the example above, the proper IAM roles and permissions for the given `AWS_ACCESS_KEY` and `AWS_SECRET_ACCESS_KEY` must be in place so that Boundary can use them to access the different KMS keys. + Next, populate the `controller.hcl` file with any relevant configuration information. @@ -171,7 +173,7 @@ controller { # This is the public hostname or IP where the workers can reach the # controller. This should typically be a load balancer address - public_cluster_address = "example-cluster-lb.example.com" + public_cluster_addr = "example-cluster-lb.example.com" # Enterprise license file, can also be the raw value or env:// value license = "file:///path/to/license/file.hclic" diff --git a/website/content/docs/install-boundary/configure-workers.mdx b/website/content/docs/install-boundary/configure-workers.mdx index 19f6e003171..5e521fd57b5 100644 --- a/website/content/docs/install-boundary/configure-workers.mdx +++ b/website/content/docs/install-boundary/configure-workers.mdx @@ -7,8 +7,6 @@ description: |- # Configure workers -In this topic, we present an opinionated deployment model to show Boundary Enterprise features such as multi-hop sessions. - Before you configure workers, you should have completed the following steps: - Installed Boundary on at least three [controller nodes](/boundary/docs/install-boundary/configure-controllers). 
@@ -20,7 +18,7 @@ Before you configure workers, you should have completed the following steps: In the following configuration files, there are common configuration components as well as some unique components depending on the role the Boundary worker performs. There are three files, one for each worker in a unique network boundary. -In a multi-hop configuration, the Boundary workers can serve one of three purposes: an ingress worker, an ingress/egress worker, or an egress worker. +Additionally, Boundary Enterprise supports a [multi-hop configuration](/boundary/docs/configuration/worker#multi-hop-worker-capabilities-hcp-ent) in which the Boundary workers can serve one of three purposes: an ingress worker, an ingress/egress worker, or an egress worker. ## Prepare the environment files @@ -41,7 +39,9 @@ AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY + In the example above, the proper IAM roles and permissions for the given `AWS_ACCESS_KEY` and `AWS_SECRET_ACCESS_KEY` must be in place so that Boundary can use them to access the different KMS keys. + ## Prepare the worker KMS keys @@ -62,6 +62,21 @@ After you create the requisite key or keys in the cloud provider of your choice, The following configuration examples all employ the PKI method of authentication with a worker-led authorization flow. For more information on configuring KMS authentication for Boundary workers, refer to the [KMS worker configuration documenation](/boundary/docs/configuration/worker/kms-worker). +If you use Boundary Enterprise, you can configure multiple workers to act in three different roles: ingress, intermediary, and egress. +For Community Edition, workers only serve one role, acting as both the point of ingress and egress. +Select your Boundary edition, and complete the following steps to configure workers. + + + + +For Boundary Enterprise, you can configure ingress, intermediary, and egress workers to take advantage of [multi-hop worker capabilities](/boundary/docs/configuration/worker#multi-hop-worker-capabilities-hcp-ent). + +Note that "ingress," "intermediary," and "egress" are general ways to describe how the respective worker interacts with resources. +A worker can serve more than one of those roles at a time. +Refer to [Multi-hop sessions](/boundary/docs/concepts/workers#multi-hop-sessions-hcp-ent) for more information. + +Complete the steps below to configure workers for Boundary Enterprise. 
+ ### Ingress worker configuration Create the `ingress-worker.hcl` file with the relevant configuration information: @@ -74,7 +89,7 @@ disable_mlock = true # listener denoting this is a worker proxy listener "tcp" { - address = "0.0.0.0:9201" + address = "0.0.0.0:9202" purpose = "proxy" } @@ -143,7 +158,7 @@ disable_mlock = true # listener denoting this is a worker proxy listener "tcp" { - address = "0.0.0.0:9201" + address = "0.0.0.0:9202" purpose = "proxy" } @@ -151,7 +166,7 @@ listener "tcp" { # worker service worker { public_addr = "" - initial_upstreams = [":9201"] + initial_upstreams = [":9202"] auth_storage_path = "/etc/boundary.d/auth_storage/" tags { type = ["worker2", "intermediate"] @@ -212,7 +227,7 @@ disable_mlock = true # listener denoting this is a worker proxy listener "tcp" { - address = "0.0.0.0:9201" + address = "0.0.0.0:9202" purpose = "proxy" } @@ -220,7 +235,7 @@ listener "tcp" { # worker service worker { public_addr = "" - initial_upstreams = [":9201"] + initial_upstreams = [":9202"] auth_storage_path = "/etc/boundary.d/auth_storage/" tags { type = ["worker3", "egress"] @@ -285,8 +300,122 @@ Boundary only supports memory locking on UNIX-like systems that support `mlock() `LimitMEMLOCK=infinity` - `listener` - Configures the listeners on which Boundary serves traffic (API cluster and proxy). -- `controller` - Configures the controller. -If present, `boundary server` starts a controller subprocess. +- `worker` - Configures the worker. +If present, `boundary server` starts a worker subprocess. +- `events` - Configures event-specific parameters. + + The example events configuration above is exhaustive and writes all events to both `stderr` and a file. + This configuration may or may not work for your organization's logging solution. + +- `kms` - Configures KMS blocks for [various purposes](/boundary/docs/concepts/security/data-encryption). + + Refer to the links below for configuration information for the different cloud KMS blocks: + + - [AWS](/boundary/docs/configuration/kms/awskms) + - [Azure](/boundary/docs/configuration/kms/azurekeyvault) + - [GCP](/boundary/docs/configuration/kms/gcpckms) + - [OCI](/boundary/docs/configuration/kms/ocikms) + - [AliCloud](/boundary/docs/configuration/kms/alicloudkms) + - [Vault Transit](/boundary/docs/configuration/kms/transit) + +Refer to the documentation for additional [top-level configuration options](/boundary/docs/configuration) and additional [controller-specific options](/boundary/docs/configuration/controller). + + + + +For the Community Edition of Boundary, you must configure a worker server that communicates with the controller and is responsible for storage-related tasks. + +Boundary Community Edition only supports egress workers. To configure workers for multi-hop sessions, check the Boundary Enterprise configuration. + +Complete the following steps to configure the worker. + +### Worker configuration + +Create the `egress-worker.hcl` file with the relevant configuration information: + + + +```hcl +# disable memory from being swapped to disk +disable_mlock = true + +# listener denoting this is a worker proxy +listener "tcp" { + address = "0.0.0.0:9202" + purpose = "proxy" +} + +# worker block for configuring the specifics of the +# worker service +worker { + public_addr = "" + initial_upstreams = [":9201"] + auth_storage_path = "/etc/boundary.d/auth_storage/" + tags { + type = ["worker", "egress"] + } +} + +# Events (logging) configuration. 
This
+# configures logging for ALL events to both
+# stderr and a file at /var/log/boundary/egress-worker.log
+events {
+  audit_enabled = true
+  sysevents_enabled = true
+  observations_enabled = true
+  sink "stderr" {
+    name = "all-events"
+    description = "All events sent to stderr"
+    event_types = ["*"]
+    format = "cloudevents-json"
+  }
+  sink {
+    name = "file-sink"
+    description = "All events sent to a file"
+    event_types = ["*"]
+    format = "cloudevents-json"
+    file {
+      path = "/var/log/boundary"
+      file_name = "egress-worker.log"
+    }
+    audit_config {
+      audit_filter_overrides {
+        sensitive = "redact"
+        secret = "redact"
+      }
+    }
+  }
+}
+
+# kms block for encrypting the authentication PKI material
+kms "awskms" {
+  purpose = "worker-auth-storage"
+  region = "us-east-1"
+  kms_key_id = "19ec80b0-dfdd-4d97-8164-c6examplekey5"
+  endpoint = "https://vpce-0e1bb1852241f8cc6-pzi0do8n.kms.us-east-1.vpce.amazonaws.com"
+}
+```
+
+
+
+Refer to the list below for explanations of the parameters used in the example above:
+
+- `disable mlock (bool: false)` - Disables the server from executing the `mlock` syscall, which prevents memory from being swapped to the disk.
+This is fine for local development and testing.
+However, it is not recommended for production unless the systems running Boundary use only encrypted swap or do not use swap at all.
+Boundary only supports memory locking on UNIX-like systems that support `mlock()` syscall like Linux and FreeBSD.
+
+  On Linux, to give the Boundary executable the ability to use `mlock` syscall without running the process as root, run the following command:
+
+  `sudo setcap cap_ipc_lock=+ep $(readlink -f $(which boundary))`
+
+  If you use a Linux distribution with a modern version of systemd, you can add the following directive to the **"[Service]"** configuration section:
+
+  `LimitMEMLOCK=infinity`
+
+- `listener` - Configures the listeners on which Boundary serves traffic (API cluster and proxy).
+- `worker` - Configures the worker.
+If present, `boundary server` starts a worker subprocess.
 - `events` - Configures event-specific parameters.
 
   The example events configuration above is exhaustive and writes all events to both `stderr` and a file.
@@ -305,6 +434,10 @@ If present, `boundary server` starts a controller subprocess.
 Refer to the documentation for additional [top-level configuration options](/boundary/docs/configuration) and additional [controller-specific options](/boundary/docs/configuration/controller).
+
+
+
+
 
 ## Start the Boundary service
 
 When the configuration files are in place on each Boundary controller, you can proceed to enable and start the binary on each of the Boundary worker nodes using `systemd`.
diff --git a/website/content/docs/install-boundary/index.mdx b/website/content/docs/install-boundary/index.mdx
index 0d54f5a2de7..47f335d249c 100644
--- a/website/content/docs/install-boundary/index.mdx
+++ b/website/content/docs/install-boundary/index.mdx
@@ -8,7 +8,7 @@ description: |-
 # Overview
 
 This section details installing Boundary in a self-managed environment.
-You can use the topics in this section to install the open source version or the Enterprise version of Boundary.
+You can use the topics in this section to install the Community Edition or the Enterprise version of Boundary.
 The section also includes reference architecture, system requirement recommendations, and best practices.
 
 To deploy HCP Boundary instead, refer to the [HCP Boundary Get Started section](/hcp/docs/get-started/deploy-and-login).
\ No newline at end of file diff --git a/website/content/docs/install-boundary/install.mdx b/website/content/docs/install-boundary/install.mdx index 400e489031c..b4bd7f9e20c 100644 --- a/website/content/docs/install-boundary/install.mdx +++ b/website/content/docs/install-boundary/install.mdx @@ -7,25 +7,118 @@ description: |- # Install Boundary -This guide outlines the required steps to manually install and configure a single HashiCorp Boundary cluster as defined in the [Recommended architecture](/boundary/docs/install-boundary/recommended-architecture) topic. -It assumes you install Boundary on virtual machines (VMs) or bare-metal servers running a Debian or Red Hat-based Linux distribution. +This guide outlines the required steps to manually install and configure a +single HashiCorp Boundary cluster as defined in the [Recommended +architecture](/boundary/docs/install-boundary/recommended-architecture) topic. +It assumes you install Boundary on virtual machines (VMs) or bare-metal servers +running a Debian or Red Hat-based Linux distribution. + +This document includes general guidance as well as specific recommendations for +popular cloud infrastructure platforms. These recommendations have also been +encoded into official Terraform reference architectures for +[AWS](https://github.com/hashicorp/boundary-reference-architecture/tree/main/deployment/aws), +[Azure](https://github.com/hashicorp/boundary-reference-architecture/tree/main/deployment/azure), +and +[GCP](https://github.com/hashicorp/boundary-reference-architecture/tree/main/deployment/gcp). + +Pre-built Boundary packages are available from the [HashiCorp Linux +Repository](https://www.hashicorp.com/blog/announcing-the-hashicorp-linux-repository). +In addition to installing the Boundary binary, the official package also +provides a systemd service unit, and a local `boundary` user account under which +the service runs. + +You must complete the following steps for each Boundary controller and worker +node that you want to deploy. The binary operates as either a worker or +controller, depending on the subsequent configuration that you generate for the +Boundary binary. -This document includes general guidance as well as specific recommendations for popular cloud infrastructure platforms. -These recommendations have also been encoded into official Terraform reference architectures for [AWS](https://github.com/hashicorp/boundary-reference-architecture/tree/main/deployment/aws), [Azure](https://github.com/hashicorp/boundary-reference-architecture/tree/main/deployment/azure), and [GCP](https://github.com/hashicorp/boundary-reference-architecture/tree/main/deployment/gcp). +The steps vary by Linux distribution. -Pre-built Boundary packages are available from the [HashiCorp Linux Repository](https://www.hashicorp.com/blog/announcing-the-hashicorp-linux-repository). -In addition to installing the Boundary binary, the official package also provides a systemd service unit, and a local `boundary` user account under which the service runs. +Select your distribution of Boundary, and complete the steps to install the +binary: -You must complete the following steps for each Boundary controller and worker node that you want to deploy. -The binary operates as either a worker or controller, depending on the subsequent configuration that you generate for the Boundary binary. + + + + -The steps vary by Linux distribution. -Select your distribution, and complete the steps to install Boundary. +1. 
Use the following command to add the HashiCorp GPG key as a trusted
+   package-signing key:
+
+   ```shell-session
+   $ curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add -
+   ```
+1. Add the official HashiCorp Linux repository:
+
+   ```shell-session
+   $ sudo apt-add-repository "deb [arch=amd64] https://apt.releases.hashicorp.com $(lsb_release -cs) main"
+   ```
+
+1. Update the package index:
+
+   ```shell-session
+   $ sudo apt update
+   ```
+1. Install Boundary Enterprise:
+
+   ```shell-session
+   $ sudo apt install boundary-enterprise
+   ```
+
+
+
+1. Use the following command to install `yum-config-manager` to manage your
+   repositories:
+
+   ```shell-session
+   $ sudo yum install -y yum-utils
+   ```
+
+2. Add the official HashiCorp Linux repository:
+
+   ```shell-session
+   $ sudo yum-config-manager --add-repo https://rpm.releases.hashicorp.com/RHEL/hashicorp.repo
+   ```
+
+3. Install Boundary Enterprise:
+
+   ```shell-session
+   $ sudo yum -y install boundary-enterprise
+   ```
+
+
+
+1. Use the following command to install `yum-config-manager` to manage your
+   repositories:
+
+   ```shell-session
+   $ sudo yum install -y yum-utils shadow-utils
+   ```
+
+2. Add the official HashiCorp Linux repository:
+
+   ```shell-session
+   $ sudo yum-config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo
+   ```
+
+3. Install Boundary Enterprise:
+
+   ```shell-session
+   $ sudo yum -y install boundary-enterprise
+   ```
+
+
+
+
+
-1. Use the following command to add the HashiCorp GPC key as a trusted package-signing key:
+1. Use the following command to add the HashiCorp GPG key as a trusted
+   package-signing key:
 
    ```shell-session
    $ curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add -
@@ -42,7 +135,7 @@ Select your distribution, and complete the steps to install Boundary.
    $ sudo apt update
    ```
 
-1. Install Boundary:
+1. Install Boundary Community Edition:
 
    ```shell-session
    $ sudo apt install boundary
@@ -51,7 +144,8 @@ Select your distribution, and complete the steps to install Boundary.
 
-1. Use the following command to install `yum-config-manager` to manage your repositories:
+1. Use the following command to install `yum-config-manager` to manage your
+   repositories:
 
    ```shell-session
    $ sudo yum install -y yum-utils
@@ -63,11 +157,36 @@ Select your distribution, and complete the steps to install Boundary.
    $ sudo yum-config-manager --add-repo https://rpm.releases.hashicorp.com/RHEL/hashicorp.repo
    ```
 
-3. Install Boundary:
+3. Install Boundary Community Edition:
 
    ```shell-session
-   $ sudo yum -y install boundary-enterprise
+   $ sudo yum -y install boundary
+   ```
+
+
+
+
+
+1. Use the following command to install `yum-config-manager` to manage your
+   repositories:
+
+   ```shell-session
+   $ sudo yum install -y yum-utils shadow-utils
    ```
 
+2. Add the official HashiCorp Linux repository:
+
+   ```shell-session
+   $ sudo yum-config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo
+   ```
+
+3. 
Install Boundary Community Edition: + + ```shell-session + $ sudo yum -y install boundary + ``` + + + \ No newline at end of file diff --git a/website/content/docs/install-boundary/system-requirements.mdx b/website/content/docs/install-boundary/system-requirements.mdx index 57f9396e7b8..a6ee6ece015 100644 --- a/website/content/docs/install-boundary/system-requirements.mdx +++ b/website/content/docs/install-boundary/system-requirements.mdx @@ -100,16 +100,25 @@ If any of the default port mappings below do not meet your organization's requir | Load balancer | Controller servers | 9203 | tcp | Health checks | | Worker servers | Controller load balancer | 9201 | tcp | Session authorization, credentials, etc | | Controllers | Postgres | 5432 | tcp | Storing system state | -| Client machines | Worker servers \* | 9202 | tcp | Session proxing | -| Worker servers | Boundary targets\* | various | tcp | Session proxing | -| Client machines | Ingress worker servers \*\* | 9202 | tcp | Multi-hop session proxing | -| Egress workers | Ingress worker servers \*\* | 9202 | tcp | Multi-hop session proxing | -| Egress workers | Boundary targets \*\* | various | tcp | Multi-hop session proxing | +| Client machines | Worker servers \* | 9202 | tcp | Session proxying | +| Worker servers | Boundary targets\* | various | tcp | Session proxying | +| Client machines | Ingress worker servers \*\* | 9202 | tcp | Multi-hop session proxying | +| Egress workers | Ingress worker servers \*\* | 9202 | tcp | Multi-hop session proxying | +| Egress workers | Boundary targets \*\* | various | tcp | Multi-hop session proxying | \* In this scenario, the client connects directly to one worker, which then proxies the connection to the Boundary target. -** In this scenario, the client connects to an ingress worker, then the ingress worker connects to a downstream egress worker, then the downstream egress worker connects to the Boundary target. -Ingress and egress workers can be chained together further to provide multiple layers of session proxying. +** Multi-hop sessions use reverse proxy tunnels that are initiated when the downstream workers start and connect to upstream workers. +There are four steps when a client requests to connect to a target that requires a multi-hop path: + +1. The client connects to an ingress worker. +1. The ingress worker connects with the next-downstream or egress worker through that worker's established reverse-proxy tunnel. + + This step may be repeated multiple times if there are multiple worker connections in the middle of the tunnel chain. +1. The final downstream egress worker connects to the Boundary target. +1. The client can now send data through the tunnel chain. + +A multi-hop worker that acts as a downstream can initiate a tunnel to one or multiple upstream workers, specified in the `initial_upstreams` list in the worker configuration. ## Network traffic encryption diff --git a/website/content/docs/operations/manage-recorded-sessions.mdx b/website/content/docs/operations/manage-recorded-sessions.mdx deleted file mode 100644 index 7770527a344..00000000000 --- a/website/content/docs/operations/manage-recorded-sessions.mdx +++ /dev/null @@ -1,80 +0,0 @@ ---- -layout: docs -page_title: Manage recorded sessions -description: |- - How to download, view, and validate Boundary's recorded sessions ---- - -# Manage recorded sessions - -Boundary provides [auditing](/boundary/docs/concepts/auditing) capabilities via [session recording](/boundary/docs/configuration/session-recording). 
-In Boundary, a session represents a set of connections between a user and a host from a target. -The session begins when an authorized user requests access to a target, and it ends when that access is terminated. -When you enable session recording on a target, any user session that connects to the target is automatically recorded. -An administrator can later view the recordings to investigate security issues, review system activity, or perform regular assessments of security policies and procedures. - -## Find recorded sessions - -Recorded sessions are stored in an external storage bucket that you create. -Storing session recordings in a system external to Boundary means those recordings can be accessed, modified, deleted, and even restored independently of Boundary. -You can view any sessions that Boundary recorded in your storage provider or via the CLI. - -You can view a list of all recorded sessions, or if you know the ID of a specific recorded session, you can find any channels associated with that recording. - -### Find all recorded sessions - -Complete the following steps to find all recorded sessions using the CLI. - -1. Authenticate to Boundary in the CLI. -1. Type the following command to view a list of all recorded sessions: - - ```bash - boundary session-recordings list -recursive - ``` - - Boundary displays a list of all recorded sessions by scope. - -### Find a specific recorded session by ID - -If you have the ID of a recorded session, you can use the following command to list the connections and channels associated with a session recording. - -```bash -boundary session-recordings read -id -``` - -## View recorded sessions - -You can view recorded sessions in the UI, if you have the proper permissions. - -1. Log in to Boundary. -1. Select **Orgs** in the navigation pane. -1. Select the org that contains the target from the recorded session you want to view. -1. Select **Session Recordings** in the navigation pane. - - The **Session Recordings** page displays the time, user, target, and duration of the recording. -1. Select **View** next to the recording you want to view. -1. Select **Play** next to the channel recording you want to view. - - The recorded session appears in the media plyaer. - You can click the **Play** button to watch the recording. - -## Download recorded session channels - -You can download a recording of SSH shell or exec sessions for a channel from a recorded session. -The channel ID is required for the download. - -If you have the ID of a recorded session, you can use the following command to list the connections and channels associated with a session recording. - -```bash -boundary session-recordings read -id -``` - -Complete the following steps to download a recorded session channel: - -1. Authenticate to Boundary in the CLI. -1. Type the following command to download a recorded session channel. 
-Substitute the ID of the channel for **chr_1234567890**: - - ```bash - boundary session-recordings download -id chr_1234567890 - ``` diff --git a/website/content/docs/operations/session-recordings/index.mdx b/website/content/docs/operations/session-recordings/index.mdx new file mode 100644 index 00000000000..d748ad80fc1 --- /dev/null +++ b/website/content/docs/operations/session-recordings/index.mdx @@ -0,0 +1,24 @@ +--- +layout: docs +page_title: Recorded sessions operations +description: |- + How to work with Boundary's recorded sessions +--- + +# Recorded sessions operations + +Boundary provides [auditing](/boundary/docs/concepts/auditing) capabilities via [session recording](/boundary/docs/configuration/session-recording). +In Boundary, a session represents a set of connections between a user and a host from a target. +The session begins when an authorized user requests access to a target, and it ends when that access is terminated. +When you enable session recording on a target, any user session that connects to the target is automatically recorded. +An administrator can later view the recordings to investigate security issues, review system activity, or perform regular assessments of security policies and procedures. + +Recorded sessions are stored in an external storage bucket that you create. +Storing session recordings in a system external to Boundary means those recordings can be accessed, modified, deleted, and even restored independently of Boundary. +You can view any sessions that Boundary recorded in your storage provider or via the CLI. + +For more information about working with recorded sessions, refer to the following topics: + +- [Find and view recorded sessions](/boundary/docs/operations/session-recordings/manage-recorded-sessions) +- [Validate the integrity of session recordings](/boundary/docs/operations/session-recordings/validate-session-recordings) +- [How Boundary validates data integrity in the external data store](/boundary/docs/operations/session-recordings/validate-data-store) \ No newline at end of file diff --git a/website/content/docs/operations/session-recordings/manage-recorded-sessions.mdx b/website/content/docs/operations/session-recordings/manage-recorded-sessions.mdx new file mode 100644 index 00000000000..decaed7b8fd --- /dev/null +++ b/website/content/docs/operations/session-recordings/manage-recorded-sessions.mdx @@ -0,0 +1,90 @@ +--- +layout: docs +page_title: Manage recorded sessions +description: |- + How to find, download, and view Boundary's recorded sessions +--- + +# Find and view recorded sessions + +You can view a list of all recorded sessions, or if you know the ID of a specific recorded session, you can find any channels associated with that recording. + + + + +## Find all recorded sessions + +Complete the following steps to find all recorded sessions using the CLI. + +1. Authenticate to Boundary in the CLI. +1. Type the following command to view a list of all recorded sessions: + + ```bash + boundary session-recordings list -recursive + ``` + + Boundary displays a list of all recorded sessions by scope. + +## Find a specific recorded session by ID + +If you have the ID of a recorded session, you can use the following command to list the connections and channels associated with a session recording. + +```bash +boundary session-recordings read -id +``` + +## Download recorded session channels + +You can download a recording of SSH shell or exec sessions for a channel from a recorded session. +The channel ID is required for the download. 
+
+If you have the ID of a recorded session, you can use the following command to list the connections and channels associated with a session recording.
+
+```bash
+boundary session-recordings read -id
+```
+
+Complete the following steps to download a recorded session channel:
+
+1. Authenticate to Boundary in the CLI.
+1. Type the following command to download a recorded session channel.
+Substitute the ID of the channel for **chr_1234567890**:
+
+   ```bash
+   boundary session-recordings download -id chr_1234567890
+   ```
+
+
+
+## Find all recorded sessions
+
+You can find all recorded sessions in the UI from the global scope.
+
+1. Log in to Boundary.
+1. Select **Session Recordings** in the navigation pane.
+
+   The **Session Recordings** page displays the created time, user, project,
+   target, and duration of the recording.
+
+## View session recording details
+
+1. Select **View** next to the session recording you want to view.
+
+   The details page has information related to the session recording and
+   links to related information such as the user, target, and storage bucket.
+
+## Play back channel recording
+
+1. Select **Play** next to the channel recording you want to view.
+
+   The recorded session appears in the media player.
+   Click the **Play** button located at the bottom of the media player
+   to watch the recording.
+
+   If a recorded session channel does not support playback, a **View** button
+   is shown. The playback page displays a message stating that playback
+   is not supported, but still shows details specific to that channel.
+
+
+
diff --git a/website/content/docs/operations/session-recordings/validate-data-store.mdx b/website/content/docs/operations/session-recordings/validate-data-store.mdx
new file mode 100644
index 00000000000..7eb0dda0608
--- /dev/null
+++ b/website/content/docs/operations/session-recordings/validate-data-store.mdx
@@ -0,0 +1,17 @@
+---
+layout: docs
+page_title: Validate the data integrity in the external object store
+description: |-
+  How Boundary validates the data integrity of recorded sessions in the external object store
+---
+# How Boundary validates data integrity in the external object store
+
+When a Boundary worker uploads a BSR file to AWS S3 through the Boundary AWS plugin, the plugin calculates the SHA256 checksum of the contents of the BSR file and attaches this information to the object that is uploaded to S3.
+The SHA256 checksum value attached to the S3 object is returned to the Boundary worker.
+The Boundary worker calculates the SHA256 checksum value of the BSR file's content from
+local disk and compares it to the plugin value.
+
+This process ensures that no tampering of BSR files occurs between the worker, plugin, and S3.
+The SHA256 checksum value generated by the plugin is not a part of the BSR file structure and should not be confused with how Boundary cryptographically verifies the BSR directory's contents.
+
+For more information, refer to the [overview of configuring session recording](/boundary/docs/configuration/session-recording).
\ No newline at end of file
diff --git a/website/content/docs/operations/session-recordings/validate-session-recordings.mdx b/website/content/docs/operations/session-recordings/validate-session-recordings.mdx
new file mode 100644
index 00000000000..fb0913ed16c
--- /dev/null
+++ b/website/content/docs/operations/session-recordings/validate-session-recordings.mdx
@@ -0,0 +1,57 @@
+---
+layout: docs
+page_title: Validate the integrity of recorded sessions
+description: |-
+  How to validate the integrity of Boundary's recorded sessions
+---
+
+# Validate the integrity of session recordings
+
+BSR directories are validated based on the contents of the directory.
+Boundary cryptographically verifies each individual Boundary Session Recording (BSR) file.
+The keys used for verifying all Boundary Session Recording files are written to storage and wrapped by the KMS you configured.
+Each session recording has its own individual key.
+Boundary generates the following keys when a session recording is authorized:
+
+- The BSR key is a plaintext AES-GCM key.
+It is not uploaded to the external object store.
+
+- The private and public key pair is an ed25519 key pair.
+The key pair is not uploaded to the external object store.
+
+The following files are stored in the BSR file structure to ensure the integrity of a session recording:
+
+- `bsrKey.pub` is the public ed25519 key.
+- `wrappedBsrKey` is the BSR key wrapped by the external KMS AES-GCM key that you configure.
+- `wrappedPrivKey` is the private ed25519 key wrapped by the external KMS AES-GCM key that you configure.
+- `pubKeySelfSignature.sign` is a self-signature of the plaintext public ed25519 key created with its private key.
+- `pubKeyBsrSignature.sign` is a signature of the plaintext public ed25519 key created with the BSR key.
+- `SHA256SUM.sig` is a signature of the plaintext `SHA256SUM` file created with the private key.
+
+Encrypting the BSR key with an external KMS means that Boundary is not responsible for the longevity of the keys.
+The Boundary admin can always use that external KMS to unwrap the `wrappedBsrKey` and `wrappedPrivKey`.
+A BSR’s key is encrypted using the `go-kms-wrapping` package, and therefore the encrypted BlobInfo includes the metadata required to identify the key-version used during encryption.
+So if the wrapper is reinitialized properly, you can unwrap the keys even if the key has been rotated.
+
+Each BSR directory contains a SHA256SUM and SHA256SUM.sig file that you can use to cryptographically verify the BSR directory's contents.
+The SHA256SUM file contains rows of file names paired with a checksum for the file contents.
+The SHA256SUM.sig is a copy of the SHA256SUM file, signed with the BSR's private key.
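+
+As a quick local spot-check, assuming you are inside a BSR directory that contains the `SHA256SUM` file along with the files it lists, you can confirm that the recorded checksums still match the files on disk with the standard `sha256sum` tool. This only checks the checksums themselves; it does not replace verifying the `SHA256SUM.sig` signature with the keys described above:
+
+```shell-session
+$ sha256sum --check SHA256SUM
+```
+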
+Refer to the following example of a SHA256SUM file: + + ``` + dc8ce2c42553ce510197c99efe21d89d6229feb4b49170511f49965f2e3cf1a3 wrappedBsrKey + a5a91b1b52fb53c4bab661b2e5846bb2a836f050e3d745e436078871914a0bc2 wrappedPrivKey + 1ca281852ec0d447b94708f28a51b562d47b84affdba25e13a97b0fbd9126424 pubKeyBsrSignature.sign + 7b5e18e5930bb4cce12a3f203328d9065cae29f26aba3963bb5faece2cf97231 pubKeySelfSignature.sign + dc7c6b1316624c7c486a22bab157f947df92b9f2ce4a72469b1f335399a043d5 bsrKey.pub + 4d3966c458f4e5d67f9ac70b804540b927c718965267c3f36526bf0b18c40ad9 session-meta.json + 6fec2173d331828677fb5e77fc19168daad3c5f0e82517a82e5701e6c2bdcbe1 session-recording.meta + ad76483e7cf3e65391a3a1d0b86a3ad436333ee225bea042b13900abc188b226 session-recording-summary.json + ``` + +Follow these steps to validate a session recording: + +1. Unwrap `wrappedBsrKey` using the external KMS you configured to retrieve the BSR key. +2. Unwrap `wrappedPrivKey` using the external KMS you configured to retrieve the private key. +3. Use the BSR key or the private key to verify the `bsrKey.pub` key using `go-kms-wrapping` HmacSha256(...). +4. When the key is verified, use the `bsrKey.pub` key to verify the BSR SHA256SUM file using `go-kms-wrapping` ed25519.Sign(...). \ No newline at end of file diff --git a/website/content/docs/oss/index.mdx b/website/content/docs/oss/index.mdx index 1efd56635b9..a2ba90fdcf2 100644 --- a/website/content/docs/oss/index.mdx +++ b/website/content/docs/oss/index.mdx @@ -1,26 +1,26 @@ --- layout: docs -page_title: OSS Boundary -description: An introduction to OSS Boundary +page_title: Boundary Community Edition +description: An introduction to Boundary Community Edition --- -# Boundary OSS +# Boundary Community Edition Boundary provides secure remote access to critical systems with fine-grained authorizations based on trusted identities. While HCP Boundary provides a -managed solution, Boundary OSS is a self-managed distribution that organizations +managed solution, Boundary Community Edition is a self-managed distribution that organizations can deploy on-premise to manage access to infrastructure endpoints. The source -code for Boundary OSS is freely available on GitHub. The following sections +code for Boundary Community Edition is freely available on GitHub. The following sections contain information on the use case for Boundary and how to install and configure self-managed Boundary environments. Traditional approaches like SSH bastion hosts or VPNs require distributing and managing credentials, configuring network controls like firewalls, and exposing -the private network. Boundary OSS provides secure access to hosts and critical +the private network. Boundary Community Edition provides secure access to hosts and critical systems without having to manage credentials or expose your network, and is -entirely open source. +free. -Boundary OSS is designed to be straightforward to understand, highly scalable, +Boundary Community Edition is designed to be straightforward to understand, highly scalable, and resilient. It can run in clouds, on-premise, or within secure enclaves. Boundary does not require an agent to be installed on the end host. 
diff --git a/website/content/docs/overview/what-is-boundary.mdx b/website/content/docs/overview/what-is-boundary.mdx index 5f7c3f8ce99..f7dbf3f7775 100644 --- a/website/content/docs/overview/what-is-boundary.mdx +++ b/website/content/docs/overview/what-is-boundary.mdx @@ -44,7 +44,7 @@ The core Boundary workflow consists of four stages: - **HCP Boundary**: a managed Boundary offering with commercial features. HashiCorp hosts Boundary's control plane and you have the option of running private workers within your environment. - **Boundary Enterprise**: a self-managed Boundary offering with full feature parity to HCP Boundary. -- **Boundary OSS**: an open source, self-managed version of Boundary. +- **Boundary Community Edition**: a free, self-managed version of Boundary. If you're not sure which edition is right for you, we recommend [**HCP Boundary**](https://developer.hashicorp.com/boundary/tutorials/hcp-getting-started) because it eliminates deployment operations. diff --git a/website/content/docs/release-notes/v0_13_0.mdx b/website/content/docs/release-notes/v0_13_0.mdx index bb99d325633..ec419eda77c 100644 --- a/website/content/docs/release-notes/v0_13_0.mdx +++ b/website/content/docs/release-notes/v0_13_0.mdx @@ -31,7 +31,7 @@ Highlights include: **Boundary Enterprise**: Organizations are now able to deploy Boundary Enterprise within their self-managed infrastructure to let their teams securely access hosts and services consistently across any environment. The introduction of Boundary Enterprise allows organizations in highly regulated industries who are prohibited from adopting cloud based solutions to leverage Boundary's secure remote access functionalities. -Boundary Enterprise has the same feature set as HCP Boundary and seamless migrations from OSS deployments are supported as well. +Boundary Enterprise has the same feature set as HCP Boundary and seamless migrations from Community Edition deployments are supported as well. For more information, refer to [Boundary Enterprise](/boundary/docs/enterprise). diff --git a/website/content/docs/troubleshoot/faq.mdx b/website/content/docs/troubleshoot/faq.mdx index b9b8e1b4ffe..9d1c300a80b 100644 --- a/website/content/docs/troubleshoot/faq.mdx +++ b/website/content/docs/troubleshoot/faq.mdx @@ -25,7 +25,7 @@ value proposition for identity-based access.There are three primary points of in [tutorials](/boundary/tutorials/access-management/oss-vault-cred-brokering-quickstart) of this scenario. 2. Boundary can use Vault as an OIDC provider to enable sign-in with Vault's supported auth methods (even non-OIDC auth methods like Active Directory kerberos/LDAP). This scenario is walked through in this [tutorial](/vault/tutorials/auth-methods/oidc-identity-provider). -3. OSS Boundary can use Vault as the external KMS that serves as Boundary's root of trust. More information on this use case can be found +3. Boundary Community Edition can use Vault as the external KMS that serves as Boundary's root of trust. More information on this use case can be found [here](/boundary/docs/configuration/kms/transit). ## Q: What identity providers does Boundary support? @@ -58,7 +58,7 @@ For more information on dynamic host catalogs, please see: **Session Logging/Monitoring:** Supported. Boundary creates a session log of all sessions created between identities and targets that have been onboarded to Boundary. You can learn how to monitor these sessions in this [tutorial](/boundary/tutorials/getting-started/getting-started-connect#manage-sessions). 
-Boundary supports audit logs for [Boundary OSS](/boundary/tutorials/oss-configuration/event-logging) and [audit log streaming](/hcp/docs/boundary/audit-logging) +Boundary supports audit logs for [Boundary Community Edition](/boundary/tutorials/oss-configuration/event-logging) and [audit log streaming](/hcp/docs/boundary/audit-logging) for HCP Boundary. Audit logs for both distributions can be exported to SIEM or BI tools. **Session Termination:** Supported. Session termination for Boundary administrators is a supported capability, as demonstrated in this diff --git a/website/data/docs-nav-data.json b/website/data/docs-nav-data.json index edb6be9603e..3021317923b 100644 --- a/website/data/docs-nav-data.json +++ b/website/data/docs-nav-data.json @@ -133,12 +133,12 @@ "path": "concepts" }, { - "title": "Identity and access management", - "path": "concepts/iam" + "title": "Workers", + "path": "concepts/workers" }, { - "title": "Host discovery", - "path": "concepts/host-discovery" + "title": "Identity and access management", + "path": "concepts/iam" }, { "title": "Credential management", @@ -148,6 +148,23 @@ "title": "Auditing", "path": "concepts/auditing" }, + { + "title": "Host discovery", + "routes": [ + { + "title": "Overview", + "path": "concepts/host-discovery" + }, + { + "title": "AWS dynamic hosts", + "path": "concepts/host-discovery/aws" + }, + { + "title": "Azure dynamic hosts", + "path": "concepts/host-discovery/azure" + } + ] + }, { "title": "Security", "routes": [ @@ -437,7 +454,24 @@ }, { "title": "Session recordings", - "path": "operations/manage-recorded-sessions" + "routes": [ + { + "title": "Overview", + "path": "operations/session-recordings" + }, + { + "title": "Find and view recorded sessions", + "path": "operations/session-recordings/manage-recorded-sessions" + }, + { + "title": "Validate recorded sessions", + "path": "operations/session-recordings/validate-session-recordings" + }, + { + "title": "Validate external data store", + "path": "operations/session-recordings/validate-data-store" + } + ] } ] }, @@ -543,7 +577,7 @@ "divider": true }, { - "title": "Boundary OSS", + "title": "Boundary Community Edition", "hidden": true, "routes": [ { diff --git a/website/public/img/access-model.png b/website/public/img/access-model.png new file mode 100644 index 00000000000..eada43e22af Binary files /dev/null and b/website/public/img/access-model.png differ diff --git a/website/public/img/concepts-multihop.png b/website/public/img/concepts-multihop.png new file mode 100644 index 00000000000..c7d7c4a5cce Binary files /dev/null and b/website/public/img/concepts-multihop.png differ diff --git a/website/public/img/worker-tags.png b/website/public/img/worker-tags.png new file mode 100644 index 00000000000..b73e4b48f28 Binary files /dev/null and b/website/public/img/worker-tags.png differ diff --git a/website/redirects.js b/website/redirects.js index 3b6ed31bed8..46e3e79853e 100644 --- a/website/redirects.js +++ b/website/redirects.js @@ -133,4 +133,9 @@ module.exports = [ destination: '/boundary/docs/concepts/host-discovery', permanent: true, }, + { + source: '/boundary/docs/operations/manage-recorded-sessions', + destination: '/boundary/docs/operations/session-recordings', + permanent: true, + }, ]
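
Once a docs preview build is available, one way to spot-check the redirect entry added above is to request the old path and confirm that the response is a permanent redirect (301 or 308) whose `location` header points at `/boundary/docs/operations/session-recordings`. The hostname below is a placeholder for whatever host serves the preview:

```shell-session
$ curl -sI https://<preview-host>/boundary/docs/operations/manage-recorded-sessions | grep -iE '^(HTTP|location)'
```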